From 22aa40bb6d566e603c1fd5d8c00a3307f53f3301 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 1 Jul 2016 12:18:56 +0200 Subject: [PATCH 01/93] Build: add apache async http client dependencies --- client/rest/build.gradle | 2 + .../licenses/httpasyncclient-4.1.2.jar.sha1 | 1 + .../rest/licenses/httpasyncclient-LICENSE.txt | 558 ++++++++++++++++++ .../rest/licenses/httpasyncclient-NOTICE.txt | 6 + client/rest/licenses/httpcore-4.4.4.jar.sha1 | 1 - client/rest/licenses/httpcore-4.4.5.jar.sha1 | 1 + .../rest/licenses/httpcore-nio-4.4.5.jar.sha1 | 1 + client/rest/licenses/httpcore-nio-LICENSE.txt | 558 ++++++++++++++++++ client/rest/licenses/httpcore-nio-NOTICE.txt | 6 + .../sniffer/licenses/httpcore-4.4.4.jar.sha1 | 1 - .../sniffer/licenses/httpcore-4.4.5.jar.sha1 | 1 + 11 files changed, 1134 insertions(+), 2 deletions(-) create mode 100644 client/rest/licenses/httpasyncclient-4.1.2.jar.sha1 create mode 100644 client/rest/licenses/httpasyncclient-LICENSE.txt create mode 100644 client/rest/licenses/httpasyncclient-NOTICE.txt delete mode 100644 client/rest/licenses/httpcore-4.4.4.jar.sha1 create mode 100644 client/rest/licenses/httpcore-4.4.5.jar.sha1 create mode 100644 client/rest/licenses/httpcore-nio-4.4.5.jar.sha1 create mode 100644 client/rest/licenses/httpcore-nio-LICENSE.txt create mode 100644 client/rest/licenses/httpcore-nio-NOTICE.txt delete mode 100644 client/sniffer/licenses/httpcore-4.4.4.jar.sha1 create mode 100644 client/sniffer/licenses/httpcore-4.4.5.jar.sha1 diff --git a/client/rest/build.gradle b/client/rest/build.gradle index 4623fbd8c2c..4673424241f 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -31,6 +31,8 @@ group = 'org.elasticsearch.client' dependencies { compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" + compile "org.apache.httpcomponents:httpasyncclient:4.1.2" + compile "org.apache.httpcomponents:httpcore-nio:${versions.httpcore}" 
compile "commons-codec:commons-codec:${versions.commonscodec}" compile "commons-logging:commons-logging:${versions.commonslogging}" diff --git a/client/rest/licenses/httpasyncclient-4.1.2.jar.sha1 b/client/rest/licenses/httpasyncclient-4.1.2.jar.sha1 new file mode 100644 index 00000000000..065ed920a17 --- /dev/null +++ b/client/rest/licenses/httpasyncclient-4.1.2.jar.sha1 @@ -0,0 +1 @@ +95aa3e6fb520191a0970a73cf09f62948ee614be \ No newline at end of file diff --git a/client/rest/licenses/httpasyncclient-LICENSE.txt b/client/rest/licenses/httpasyncclient-LICENSE.txt new file mode 100644 index 00000000000..32f01eda18f --- /dev/null +++ b/client/rest/licenses/httpasyncclient-LICENSE.txt @@ -0,0 +1,558 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + +========================================================================= + +This project includes Public Suffix List copied from + +licensed under the terms of the Mozilla Public License, v. 2.0 + +Full license text: + +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. 
"Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. 
"Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. 
+Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. 
Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. 
However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. 
Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. 
* +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. 
Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. 
+ +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/client/rest/licenses/httpasyncclient-NOTICE.txt b/client/rest/licenses/httpasyncclient-NOTICE.txt new file mode 100644 index 00000000000..91e5c40c4c6 --- /dev/null +++ b/client/rest/licenses/httpasyncclient-NOTICE.txt @@ -0,0 +1,6 @@ +Apache HttpComponents Client +Copyright 1999-2016 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + diff --git a/client/rest/licenses/httpcore-4.4.4.jar.sha1 b/client/rest/licenses/httpcore-4.4.4.jar.sha1 deleted file mode 100644 index ef0c257e012..00000000000 --- a/client/rest/licenses/httpcore-4.4.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/client/rest/licenses/httpcore-4.4.5.jar.sha1 b/client/rest/licenses/httpcore-4.4.5.jar.sha1 new file mode 100644 index 00000000000..58172660174 --- /dev/null +++ b/client/rest/licenses/httpcore-4.4.5.jar.sha1 @@ -0,0 +1 @@ +e7501a1b34325abb00d17dde96150604a0658b54 \ No newline at end of file diff --git a/client/rest/licenses/httpcore-nio-4.4.5.jar.sha1 b/client/rest/licenses/httpcore-nio-4.4.5.jar.sha1 new file mode 100644 index 00000000000..d6a80bf100d --- /dev/null +++ b/client/rest/licenses/httpcore-nio-4.4.5.jar.sha1 @@ -0,0 +1 @@ +f4be009e7505f6ceddf21e7960c759f413f15056 \ No newline at end of file diff --git a/client/rest/licenses/httpcore-nio-LICENSE.txt b/client/rest/licenses/httpcore-nio-LICENSE.txt new file mode 100644 index 00000000000..32f01eda18f --- /dev/null +++ b/client/rest/licenses/httpcore-nio-LICENSE.txt @@ -0,0 +1,558 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ 
+ + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + +========================================================================= + +This project includes Public Suffix List copied from + +licensed under the terms of the Mozilla Public License, v. 2.0 + +Full license text: + +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. 
"Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. 
Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. 
Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. 
Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. 
However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. 
Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. 
* +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. 
Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. 
+ +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/client/rest/licenses/httpcore-nio-NOTICE.txt b/client/rest/licenses/httpcore-nio-NOTICE.txt new file mode 100644 index 00000000000..91e5c40c4c6 --- /dev/null +++ b/client/rest/licenses/httpcore-nio-NOTICE.txt @@ -0,0 +1,6 @@ +Apache HttpComponents Client +Copyright 1999-2016 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + diff --git a/client/sniffer/licenses/httpcore-4.4.4.jar.sha1 b/client/sniffer/licenses/httpcore-4.4.4.jar.sha1 deleted file mode 100644 index ef0c257e012..00000000000 --- a/client/sniffer/licenses/httpcore-4.4.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/client/sniffer/licenses/httpcore-4.4.5.jar.sha1 b/client/sniffer/licenses/httpcore-4.4.5.jar.sha1 new file mode 100644 index 00000000000..58172660174 --- /dev/null +++ b/client/sniffer/licenses/httpcore-4.4.5.jar.sha1 @@ -0,0 +1 @@ +e7501a1b34325abb00d17dde96150604a0658b54 \ No newline at end of file From 118a14fbe3347b629901a024ed92c4f96427a713 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 1 Jul 2016 12:19:37 +0200 Subject: [PATCH 02/93] Build: upgrade httpcore version to 4.4.5 Closes #19127 --- buildSrc/version.properties | 2 +- modules/reindex/licenses/httpcore-4.4.4.jar.sha1 | 1 - modules/reindex/licenses/httpcore-4.4.5.jar.sha1 | 1 + .../discovery-azure-classic/licenses/httpcore-4.4.4.jar.sha1 | 1 - .../discovery-azure-classic/licenses/httpcore-4.4.5.jar.sha1 | 1 + plugins/discovery-ec2/licenses/httpcore-4.4.4.jar.sha1 | 1 - plugins/discovery-ec2/licenses/httpcore-4.4.5.jar.sha1 | 1 + 
plugins/discovery-gce/licenses/httpcore-4.4.4.jar.sha1 | 1 - plugins/discovery-gce/licenses/httpcore-4.4.5.jar.sha1 | 1 + plugins/repository-gcs/licenses/httpcore-4.4.4.jar.sha1 | 1 - plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 | 1 + plugins/repository-s3/licenses/httpcore-4.4.4.jar.sha1 | 1 - plugins/repository-s3/licenses/httpcore-4.4.5.jar.sha1 | 1 + 13 files changed, 7 insertions(+), 7 deletions(-) delete mode 100644 modules/reindex/licenses/httpcore-4.4.4.jar.sha1 create mode 100644 modules/reindex/licenses/httpcore-4.4.5.jar.sha1 delete mode 100644 plugins/discovery-azure-classic/licenses/httpcore-4.4.4.jar.sha1 create mode 100644 plugins/discovery-azure-classic/licenses/httpcore-4.4.5.jar.sha1 delete mode 100644 plugins/discovery-ec2/licenses/httpcore-4.4.4.jar.sha1 create mode 100644 plugins/discovery-ec2/licenses/httpcore-4.4.5.jar.sha1 delete mode 100644 plugins/discovery-gce/licenses/httpcore-4.4.4.jar.sha1 create mode 100644 plugins/discovery-gce/licenses/httpcore-4.4.5.jar.sha1 delete mode 100644 plugins/repository-gcs/licenses/httpcore-4.4.4.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/httpcore-4.4.4.jar.sha1 create mode 100644 plugins/repository-s3/licenses/httpcore-4.4.5.jar.sha1 diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 7565488d4ab..9fb8d89e824 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -13,7 +13,7 @@ jna = 4.2.2 randomizedrunner = 2.3.2 junit = 4.11 httpclient = 4.5.2 -httpcore = 4.4.4 +httpcore = 4.4.5 commonslogging = 1.1.3 commonscodec = 1.10 hamcrest = 1.3 diff --git a/modules/reindex/licenses/httpcore-4.4.4.jar.sha1 b/modules/reindex/licenses/httpcore-4.4.4.jar.sha1 deleted file mode 100644 index ef0c257e012..00000000000 --- a/modules/reindex/licenses/httpcore-4.4.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file 
diff --git a/modules/reindex/licenses/httpcore-4.4.5.jar.sha1 b/modules/reindex/licenses/httpcore-4.4.5.jar.sha1 new file mode 100644 index 00000000000..58172660174 --- /dev/null +++ b/modules/reindex/licenses/httpcore-4.4.5.jar.sha1 @@ -0,0 +1 @@ +e7501a1b34325abb00d17dde96150604a0658b54 \ No newline at end of file diff --git a/plugins/discovery-azure-classic/licenses/httpcore-4.4.4.jar.sha1 b/plugins/discovery-azure-classic/licenses/httpcore-4.4.4.jar.sha1 deleted file mode 100644 index ef0c257e012..00000000000 --- a/plugins/discovery-azure-classic/licenses/httpcore-4.4.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/plugins/discovery-azure-classic/licenses/httpcore-4.4.5.jar.sha1 b/plugins/discovery-azure-classic/licenses/httpcore-4.4.5.jar.sha1 new file mode 100644 index 00000000000..58172660174 --- /dev/null +++ b/plugins/discovery-azure-classic/licenses/httpcore-4.4.5.jar.sha1 @@ -0,0 +1 @@ +e7501a1b34325abb00d17dde96150604a0658b54 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/httpcore-4.4.4.jar.sha1 b/plugins/discovery-ec2/licenses/httpcore-4.4.4.jar.sha1 deleted file mode 100644 index ef0c257e012..00000000000 --- a/plugins/discovery-ec2/licenses/httpcore-4.4.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/httpcore-4.4.5.jar.sha1 b/plugins/discovery-ec2/licenses/httpcore-4.4.5.jar.sha1 new file mode 100644 index 00000000000..58172660174 --- /dev/null +++ b/plugins/discovery-ec2/licenses/httpcore-4.4.5.jar.sha1 @@ -0,0 +1 @@ +e7501a1b34325abb00d17dde96150604a0658b54 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/httpcore-4.4.4.jar.sha1 b/plugins/discovery-gce/licenses/httpcore-4.4.4.jar.sha1 deleted file mode 100644 index ef0c257e012..00000000000 --- a/plugins/discovery-gce/licenses/httpcore-4.4.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/httpcore-4.4.5.jar.sha1 b/plugins/discovery-gce/licenses/httpcore-4.4.5.jar.sha1 new file mode 100644 index 00000000000..58172660174 --- /dev/null +++ b/plugins/discovery-gce/licenses/httpcore-4.4.5.jar.sha1 @@ -0,0 +1 @@ +e7501a1b34325abb00d17dde96150604a0658b54 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/httpcore-4.4.4.jar.sha1 b/plugins/repository-gcs/licenses/httpcore-4.4.4.jar.sha1 deleted file mode 100644 index ef0c257e012..00000000000 --- a/plugins/repository-gcs/licenses/httpcore-4.4.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 b/plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 new file mode 100644 index 00000000000..58172660174 --- /dev/null +++ b/plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 @@ -0,0 +1 @@ +e7501a1b34325abb00d17dde96150604a0658b54 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/httpcore-4.4.4.jar.sha1 b/plugins/repository-s3/licenses/httpcore-4.4.4.jar.sha1 deleted file mode 100644 index ef0c257e012..00000000000 --- a/plugins/repository-s3/licenses/httpcore-4.4.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/httpcore-4.4.5.jar.sha1 b/plugins/repository-s3/licenses/httpcore-4.4.5.jar.sha1 new file mode 100644 index 00000000000..58172660174 --- /dev/null +++ b/plugins/repository-s3/licenses/httpcore-4.4.5.jar.sha1 @@ -0,0 +1 @@ +e7501a1b34325abb00d17dde96150604a0658b54 \ No newline at end of file From 1fbec71243c8416542bc55e44bc0d1878fe6af28 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 12 Jul 2016 18:22:25 +0200 Subject: [PATCH 03/93] Rest client: introduce async performRequest method and use async client under the hood 
for sync requests too The new method accepts the usual parameters (method, endpoint, params, entity and headers) plus a response listener and an async response consumer. Shortcut methods are also added that don't require params, entity and the async response consumer optional. There are a few relevant api changes as a consequence of the move to async client that affect sync methods: - Response doesn't implement Closeable anymore, responses don't need to be closed - performRequest throws Exception rather than just IOException, as that is the the exception that we get from the FutureCallback#failed method in the async http client - ssl configuration is a bit simpler, one only needs to call setSSLStrategy from a custom HttpClientConfigCallback, that doesn't end up overridng any other default around connection pooling (it used to happen with the sync client and make ssl configuration more complex) Relates to #19055 --- .../elasticsearch/client/RequestLogger.java | 2 +- .../org/elasticsearch/client/Response.java | 14 +- .../client/ResponseException.java | 28 +- .../client/ResponseListener.java} | 32 +- .../org/elasticsearch/client/RestClient.java | 373 +++++++++++++----- .../SSLSocketFactoryHttpConfigCallback.java | 53 --- .../client/RestClientBuilderTests.java | 4 +- .../client/RestClientIntegTests.java | 21 +- .../client/RestClientMultipleHostsTests.java | 62 +-- .../client/RestClientSingleHostTests.java | 106 ++--- .../client/TrackingFailureListener.java | 5 +- .../client/sniff/HostsSniffer.java | 7 +- .../client/sniff/SniffOnFailureListener.java | 3 +- .../client/sniff/HostsSnifferTests.java | 3 +- .../sniff/SniffOnFailureListenerTests.java | 17 +- .../remote/RemoteScrollableHitSource.java | 7 +- .../http/ContextAndHeaderTransportIT.java | 21 +- .../org/elasticsearch/http/CorsNotSetIT.java | 21 +- .../org/elasticsearch/http/CorsRegexIT.java | 41 +- .../elasticsearch/http/DeprecationHttpIT.java | 71 ++-- .../http/DetailedErrorsDisabledIT.java | 4 +- 
.../http/DetailedErrorsEnabledIT.java | 11 +- .../elasticsearch/http/HttpCompressionIT.java | 104 +---- .../http/ResponseHeaderPluginIT.java | 7 +- .../test/rest/ESRestTestCase.java | 6 +- .../test/rest/RestTestExecutionContext.java | 8 +- .../test/rest/client/RestTestClient.java | 15 +- .../test/rest/client/RestTestResponse.java | 10 - .../test/rest/section/DoSection.java | 4 +- .../test/rest/section/ExecutableSection.java | 4 +- 30 files changed, 522 insertions(+), 542 deletions(-) rename client/rest/src/{test/java/org/elasticsearch/client/CloseableBasicHttpResponse.java => main/java/org/elasticsearch/client/ResponseListener.java} (51%) delete mode 100644 client/rest/src/main/java/org/elasticsearch/client/SSLSocketFactoryHttpConfigCallback.java diff --git a/client/rest/src/main/java/org/elasticsearch/client/RequestLogger.java b/client/rest/src/main/java/org/elasticsearch/client/RequestLogger.java index 24e6881fa1e..fd4c3600234 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RequestLogger.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RequestLogger.java @@ -81,7 +81,7 @@ final class RequestLogger { /** * Logs a request that failed */ - static void logFailedRequest(Log logger, HttpUriRequest request, HttpHost host, IOException e) { + static void logFailedRequest(Log logger, HttpUriRequest request, HttpHost host, Exception e) { if (logger.isDebugEnabled()) { logger.debug("request [" + request.getMethod() + " " + host + getUri(request.getRequestLine()) + "] failed", e); } diff --git a/client/rest/src/main/java/org/elasticsearch/client/Response.java b/client/rest/src/main/java/org/elasticsearch/client/Response.java index f7685b27bb9..be2a16f912d 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/Response.java +++ b/client/rest/src/main/java/org/elasticsearch/client/Response.java @@ -22,12 +22,11 @@ package org.elasticsearch.client; import org.apache.http.Header; import org.apache.http.HttpEntity; import 
org.apache.http.HttpHost; +import org.apache.http.HttpResponse; import org.apache.http.RequestLine; import org.apache.http.StatusLine; import org.apache.http.client.methods.CloseableHttpResponse; -import java.io.Closeable; -import java.io.IOException; import java.util.Objects; /** @@ -35,13 +34,13 @@ import java.util.Objects; * its corresponding {@link RequestLine} and {@link HttpHost}. * It must be closed to free any resource held by it, as well as the corresponding connection in the connection pool. */ -public class Response implements Closeable { +public class Response { private final RequestLine requestLine; private final HttpHost host; - private final CloseableHttpResponse response; + private final HttpResponse response; - Response(RequestLine requestLine, HttpHost host, CloseableHttpResponse response) { + Response(RequestLine requestLine, HttpHost host, HttpResponse response) { Objects.requireNonNull(requestLine, "requestLine cannot be null"); Objects.requireNonNull(host, "node cannot be null"); Objects.requireNonNull(response, "response cannot be null"); @@ -107,9 +106,4 @@ public class Response implements Closeable { ", response=" + response.getStatusLine() + '}'; } - - @Override - public void close() throws IOException { - this.response.close(); - } } diff --git a/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java b/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java index 44f59cce7db..5b6c4f1f0e7 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java +++ b/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java @@ -23,44 +23,26 @@ import java.io.IOException; /** * Exception thrown when an elasticsearch node responds to a request with a status code that indicates an error. 
- * Note that the response body gets passed in as a string and read eagerly, which means that the Response object - * is expected to be closed and available only to read metadata like status line, request line, response headers. + * Holds the response that was returned. */ public class ResponseException extends IOException { private Response response; - private final String responseBody; - ResponseException(Response response, String responseBody) throws IOException { - super(buildMessage(response,responseBody)); + ResponseException(Response response) throws IOException { + super(buildMessage(response)); this.response = response; - this.responseBody = responseBody; } - private static String buildMessage(Response response, String responseBody) { - String message = response.getRequestLine().getMethod() + " " + response.getHost() + response.getRequestLine().getUri() + private static String buildMessage(Response response) { + return response.getRequestLine().getMethod() + " " + response.getHost() + response.getRequestLine().getUri() + ": " + response.getStatusLine().toString(); - if (responseBody != null) { - message += "\n" + responseBody; - } - return message; } /** * Returns the {@link Response} that caused this exception to be thrown. - * Expected to be used only to read metadata like status line, request line, response headers. The response body should - * be retrieved using {@link #getResponseBody()} */ public Response getResponse() { return response; } - - /** - * Returns the response body as a string or null if there wasn't any. - * The body is eagerly consumed when an ResponseException gets created, and its corresponding Response - * gets closed straightaway so this method is the only way to get back the response body that was returned. 
- */ - public String getResponseBody() { - return responseBody; - } } diff --git a/client/rest/src/test/java/org/elasticsearch/client/CloseableBasicHttpResponse.java b/client/rest/src/main/java/org/elasticsearch/client/ResponseListener.java similarity index 51% rename from client/rest/src/test/java/org/elasticsearch/client/CloseableBasicHttpResponse.java rename to client/rest/src/main/java/org/elasticsearch/client/ResponseListener.java index dd866bac541..ce948f6569b 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/CloseableBasicHttpResponse.java +++ b/client/rest/src/main/java/org/elasticsearch/client/ResponseListener.java @@ -19,24 +19,22 @@ package org.elasticsearch.client; -import org.apache.http.StatusLine; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.message.BasicHttpResponse; - -import java.io.IOException; - /** - * Simple {@link CloseableHttpResponse} impl needed to easily create http responses that are closeable given that - * org.apache.http.impl.execchain.HttpResponseProxy is not public. + * Listener to be provided when calling async performRequest methods provided by {@link RestClient}. + * Those methods that do accept a listener will return immediately, execute asynchronously, and notify + * the listener whenever the request yielded a response, or failed with an exception. */ -class CloseableBasicHttpResponse extends BasicHttpResponse implements CloseableHttpResponse { +public interface ResponseListener { - public CloseableBasicHttpResponse(StatusLine statusline) { - super(statusline); - } + /** + * Method invoked if the request yielded a successful response + */ + void onSuccess(Response response); - @Override - public void close() throws IOException { - //nothing to close - } -} \ No newline at end of file + /** + * Method invoked if the request failed. 
There are two main categories of failures: connection failures (usually + * {@link java.io.IOException}s, or responses that were treated as errors based on their error response code + * ({@link ResponseException}s). + */ + void onFailure(Exception exception); +} diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index 0d6941c7857..b5bfd6ee07c 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -25,9 +25,9 @@ import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.HttpRequest; +import org.apache.http.HttpResponse; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.config.RequestConfig; -import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpEntityEnclosingRequestBase; import org.apache.http.client.methods.HttpHead; import org.apache.http.client.methods.HttpOptions; @@ -37,11 +37,16 @@ import org.apache.http.client.methods.HttpPut; import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.client.methods.HttpTrace; import org.apache.http.client.utils.URIBuilder; +import org.apache.http.concurrent.FutureCallback; import org.apache.http.entity.ContentType; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; -import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; -import org.apache.http.util.EntityUtils; +import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; +import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; +import org.apache.http.nio.client.methods.HttpAsyncMethods; +import org.apache.http.nio.conn.SchemeIOSessionStrategy; +import org.apache.http.nio.protocol.HttpAsyncRequestProducer; +import 
org.apache.http.nio.protocol.HttpAsyncResponseConsumer; import java.io.Closeable; import java.io.IOException; @@ -51,6 +56,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; @@ -58,6 +64,7 @@ import java.util.Objects; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; @@ -79,9 +86,8 @@ public final class RestClient implements Closeable { private static final Log logger = LogFactory.getLog(RestClient.class); public static ContentType JSON_CONTENT_TYPE = ContentType.create("application/json", Consts.UTF_8); - private final CloseableHttpClient client; - //we don't rely on default headers supported by HttpClient as those cannot be replaced, plus it would get hairy - //when we create the HttpClient instance on our own as there would be two different ways to set the default headers. 
+ private final CloseableHttpAsyncClient client; + //we don't rely on default headers supported by HttpAsyncClient as those cannot be replaced private final Header[] defaultHeaders; private final long maxRetryTimeoutMillis; private final AtomicInteger lastHostIndex = new AtomicInteger(0); @@ -89,7 +95,7 @@ public final class RestClient implements Closeable { private final ConcurrentMap blacklist = new ConcurrentHashMap<>(); private final FailureListener failureListener; - RestClient(CloseableHttpClient client, long maxRetryTimeoutMillis, Header[] defaultHeaders, + RestClient(CloseableHttpAsyncClient client, long maxRetryTimeoutMillis, Header[] defaultHeaders, HttpHost[] hosts, FailureListener failureListener) { this.client = client; this.maxRetryTimeoutMillis = maxRetryTimeoutMillis; @@ -127,8 +133,8 @@ public final class RestClient implements Closeable { * @throws ClientProtocolException in case of an http protocol error * @throws ResponseException in case elasticsearch responded with a status code that indicated an error */ - public Response performRequest(String method, String endpoint, Header... headers) throws IOException { - return performRequest(method, endpoint, Collections.emptyMap(), null, headers); + public Response performRequest(String method, String endpoint, Header... headers) throws Exception { + return performRequest(method, endpoint, Collections.emptyMap(), (HttpEntity)null, headers); } /** @@ -144,16 +150,16 @@ public final class RestClient implements Closeable { * @throws ClientProtocolException in case of an http protocol error * @throws ResponseException in case elasticsearch responded with a status code that indicated an error */ - public Response performRequest(String method, String endpoint, Map params, Header... headers) throws IOException { - return performRequest(method, endpoint, params, null, headers); + public Response performRequest(String method, String endpoint, Map params, Header... 
headers) throws Exception { + return performRequest(method, endpoint, params, (HttpEntity)null, headers); } /** - * Sends a request to the elasticsearch cluster that the current client points to. - * Selects a host out of the provided ones in a round-robin fashion. Failing hosts are marked dead and retried after a certain - * amount of time (minimum 1 minute, maximum 30 minutes), depending on how many times they previously failed (the more failures, - * the later they will be retried). In case of failures all of the alive nodes (or dead nodes that deserve a retry) are retried - * till one responds or none of them does, in which case an {@link IOException} will be thrown. + * Sends a request to the elasticsearch cluster that the current client points to. Blocks till the request is completed and returns + * its response of fails by throwing an exception. Selects a host out of the provided ones in a round-robin fashion. Failing hosts + * are marked dead and retried after a certain amount of time (minimum 1 minute, maximum 30 minutes), depending on how many times + * they previously failed (the more failures, the later they will be retried). In case of failures all of the alive nodes (or dead + * nodes that deserve a retry) are retried till one responds or none of them does, in which case an {@link IOException} will be thrown. * * @param method the http method * @param endpoint the path of the request (without host and port) @@ -166,73 +172,159 @@ public final class RestClient implements Closeable { * @throws ResponseException in case elasticsearch responded with a status code that indicated an error */ public Response performRequest(String method, String endpoint, Map params, - HttpEntity entity, Header... headers) throws IOException { + HttpEntity entity, Header... 
headers) throws Exception { + HttpAsyncResponseConsumer consumer = HttpAsyncMethods.createConsumer(); + SyncResponseListener listener = new SyncResponseListener(); + performRequest(method, endpoint, params, entity, consumer, listener, headers); + return listener.get(); + } + + /** + * Sends a request to the elasticsearch cluster that the current client points to. + * Shortcut to {@link #performRequest(String, String, Map, HttpEntity, HttpAsyncResponseConsumer, ResponseListener, Header...)} + * but without parameters, request body and async response consumer. A default response consumer, specifically an instance of + * ({@link org.apache.http.nio.protocol.BasicAsyncResponseConsumer} will be created and used. + * + * @param method the http method + * @param endpoint the path of the request (without host and port) + * @param responseListener the {@link ResponseListener} to notify when the request is completed or fails + * @param headers the optional request headers + */ + public void performRequest(String method, String endpoint, ResponseListener responseListener, Header... headers) { + performRequest(method, endpoint, Collections.emptyMap(), null, responseListener, headers); + } + + /** + * Sends a request to the elasticsearch cluster that the current client points to. + * Shortcut to {@link #performRequest(String, String, Map, HttpEntity, HttpAsyncResponseConsumer, ResponseListener, Header...)} + * but without request body and async response consumer. A default response consumer, specifically an instance of + * ({@link org.apache.http.nio.protocol.BasicAsyncResponseConsumer} will be created and used. 
+ * + * @param method the http method + * @param endpoint the path of the request (without host and port) + * @param params the query_string parameters + * @param responseListener the {@link ResponseListener} to notify when the request is completed or fails + * @param headers the optional request headers + */ + public void performRequest(String method, String endpoint, Map params, + ResponseListener responseListener, Header... headers) { + performRequest(method, endpoint, params, null, responseListener, headers); + } + + /** + /** + * Sends a request to the elasticsearch cluster that the current client points to. + * Shortcut to {@link #performRequest(String, String, Map, HttpEntity, HttpAsyncResponseConsumer, ResponseListener, Header...)} + * but without an async response consumer, meaning that a {@link org.apache.http.nio.protocol.BasicAsyncResponseConsumer} + * will be created and used. + * + * @param method the http method + * @param endpoint the path of the request (without host and port) + * @param params the query_string parameters + * @param entity the body of the request, null if not applicable + * @param responseListener the {@link ResponseListener} to notify when the request is completed or fails + * @param headers the optional request headers + */ + public void performRequest(String method, String endpoint, Map params, + HttpEntity entity, ResponseListener responseListener, Header... headers) { + HttpAsyncResponseConsumer responseConsumer = HttpAsyncMethods.createConsumer(); + performRequest(method, endpoint, params, entity, responseConsumer, responseListener, headers); + } + + /** + * Sends a request to the elasticsearch cluster that the current client points to. The request is executed asynchronously + * and the provided {@link ResponseListener} gets notified whenever it is completed or it fails. + * Selects a host out of the provided ones in a round-robin fashion. 
Failing hosts are marked dead and retried after a certain + * amount of time (minimum 1 minute, maximum 30 minutes), depending on how many times they previously failed (the more failures, + * the later they will be retried). In case of failures all of the alive nodes (or dead nodes that deserve a retry) are retried + * till one responds or none of them does, in which case an {@link IOException} will be thrown. + * + * @param method the http method + * @param endpoint the path of the request (without host and port) + * @param params the query_string parameters + * @param entity the body of the request, null if not applicable + * @param responseConsumer the {@link HttpAsyncResponseConsumer} callback + * @param responseListener the {@link ResponseListener} to notify when the request is completed or fails + * @param headers the optional request headers + */ + public void performRequest(String method, String endpoint, Map params, + HttpEntity entity, HttpAsyncResponseConsumer responseConsumer, + ResponseListener responseListener, Header... headers) { URI uri = buildUri(endpoint, params); HttpRequestBase request = createHttpRequest(method, uri, entity); setHeaders(request, headers); - //we apply a soft margin so that e.g. 
if a request took 59 seconds and timeout is set to 60 we don't do another attempt - long retryTimeoutMillis = Math.round(this.maxRetryTimeoutMillis / (float)100 * 98); - IOException lastSeenException = null; + FailureTrackingListener failureTrackingListener = new FailureTrackingListener(responseListener); long startTime = System.nanoTime(); - for (HttpHost host : nextHost()) { - if (lastSeenException != null) { - //in case we are retrying, check whether maxRetryTimeout has been reached, in which case an exception will be thrown - long timeElapsedMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime); - long timeout = retryTimeoutMillis - timeElapsedMillis; - if (timeout <= 0) { - IOException retryTimeoutException = new IOException( - "request retries exceeded max retry timeout [" + retryTimeoutMillis + "]"); - retryTimeoutException.addSuppressed(lastSeenException); - throw retryTimeoutException; + performRequest(startTime, nextHost().iterator(), request, responseConsumer, failureTrackingListener); + } + + private void performRequest(final long startTime, final Iterator hosts, final HttpRequestBase request, + final HttpAsyncResponseConsumer responseConsumer, + final FailureTrackingListener listener) { + final HttpHost host = hosts.next(); + //we stream the request body if the entity allows for it + HttpAsyncRequestProducer requestProducer = HttpAsyncMethods.create(host, request); + client.execute(requestProducer, responseConsumer, new FutureCallback() { + @Override + public void completed(HttpResponse httpResponse) { + try { + RequestLogger.logResponse(logger, request, host, httpResponse); + int statusCode = httpResponse.getStatusLine().getStatusCode(); + Response response = new Response(request.getRequestLine(), host, httpResponse); + if (isSuccessfulResponse(request.getMethod(), statusCode)) { + onResponse(host); + listener.onSuccess(response); + } else { + ResponseException responseException = new ResponseException(response); + if 
(mustRetry(statusCode)) { + //mark host dead and retry against next one + onFailure(host); + retryIfPossible(responseException, hosts, request); + } else { + //mark host alive and don't retry, as the error should be a request problem + onResponse(host); + listener.onDefinitiveFailure(responseException); + } + } + } catch(Exception e) { + listener.onDefinitiveFailure(e); } - //also reset the request to make it reusable for the next attempt - request.reset(); } - CloseableHttpResponse httpResponse; - try { - httpResponse = client.execute(host, request); - } catch(IOException e) { - RequestLogger.logFailedRequest(logger, request, host, e); - onFailure(host); - lastSeenException = addSuppressedException(lastSeenException, e); - continue; - } - Response response = new Response(request.getRequestLine(), host, httpResponse); - int statusCode = response.getStatusLine().getStatusCode(); - if (statusCode < 300 || (request.getMethod().equals(HttpHead.METHOD_NAME) && statusCode == 404) ) { - RequestLogger.logResponse(logger, request, host, httpResponse); - onResponse(host); - return response; - } - RequestLogger.logResponse(logger, request, host, httpResponse); - String responseBody; - try { - if (response.getEntity() == null) { - responseBody = null; - } else { - responseBody = EntityUtils.toString(response.getEntity()); - } - } finally { - response.close(); - } - lastSeenException = addSuppressedException(lastSeenException, new ResponseException(response, responseBody)); - switch(statusCode) { - case 502: - case 503: - case 504: - //mark host dead and retry against next one + @Override + public void failed(Exception failure) { + try { + RequestLogger.logFailedRequest(logger, request, host, failure); onFailure(host); - break; - default: - //mark host alive and don't retry, as the error should be a request problem - onResponse(host); - throw lastSeenException; + retryIfPossible(failure, hosts, request); + } catch(Exception e) { + listener.onDefinitiveFailure(e); + } } - } - 
//we get here only when we tried all nodes and they all failed - assert lastSeenException != null; - throw lastSeenException; + + private void retryIfPossible(Exception exception, Iterator hosts, HttpRequestBase request) { + if (hosts.hasNext()) { + //in case we are retrying, check whether maxRetryTimeout has been reached + long timeElapsedMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime); + long timeout = maxRetryTimeoutMillis - timeElapsedMillis; + if (timeout <= 0) { + IOException retryTimeoutException = new IOException( + "request retries exceeded max retry timeout [" + maxRetryTimeoutMillis + "]"); + listener.onDefinitiveFailure(retryTimeoutException); + } else { + listener.trackFailure(exception); + request.reset(); + performRequest(startTime, hosts, request, responseConsumer, listener); + } + } else { + listener.onDefinitiveFailure(exception); + } + } + + @Override + public void cancelled() { + } + }); } private void setHeaders(HttpRequest httpRequest, Header[] requestHeaders) { @@ -316,7 +408,21 @@ public final class RestClient implements Closeable { client.close(); } - private static IOException addSuppressedException(IOException suppressedException, IOException currentException) { + private static boolean isSuccessfulResponse(String method, int statusCode) { + return statusCode < 300 || (HttpHead.METHOD_NAME.equals(method) && statusCode == 404); + } + + private static boolean mustRetry(int statusCode) { + switch(statusCode) { + case 502: + case 503: + case 504: + return true; + } + return false; + } + + private static Exception addSuppressedException(Exception suppressedException, Exception currentException) { if (suppressedException != null) { currentException.addSuppressed(suppressedException); } @@ -372,6 +478,57 @@ public final class RestClient implements Closeable { } } + private static class FailureTrackingListener { + private final ResponseListener responseListener; + private volatile Exception exception; + + 
FailureTrackingListener(ResponseListener responseListener) { + this.responseListener = responseListener; + } + + void onSuccess(Response response) { + responseListener.onSuccess(response); + } + + void onDefinitiveFailure(Exception exception) { + trackFailure(exception); + responseListener.onFailure(this.exception); + } + + void trackFailure(Exception exception) { + this.exception = addSuppressedException(this.exception, exception); + } + } + + private static class SyncResponseListener implements ResponseListener { + final CountDownLatch latch = new CountDownLatch(1); + volatile Response response; + volatile Exception exception; + + @Override + public void onSuccess(Response response) { + this.response = response; + latch.countDown(); + + } + + @Override + public void onFailure(Exception exception) { + this.exception = exception; + latch.countDown(); + } + + Response get() throws Exception { + latch.await(); + if (response != null) { + assert exception == null; + return response; + } + assert exception != null; + throw exception; + } + } + /** * Returns a new {@link Builder} to help with {@link RestClient} creation. */ @@ -380,13 +537,17 @@ public final class RestClient implements Closeable { } /** - * Rest client builder. Helps creating a new {@link RestClient}. + * Helps creating a new {@link RestClient}. Allows to set the most common http client configuration options when internally + * creating the underlying {@link org.apache.http.nio.client.HttpAsyncClient}. Also allows to provide an externally created + * {@link org.apache.http.nio.client.HttpAsyncClient} in case additional customization is needed. 
*/ public static final class Builder { public static final int DEFAULT_CONNECT_TIMEOUT_MILLIS = 1000; public static final int DEFAULT_SOCKET_TIMEOUT_MILLIS = 10000; public static final int DEFAULT_MAX_RETRY_TIMEOUT_MILLIS = DEFAULT_SOCKET_TIMEOUT_MILLIS; public static final int DEFAULT_CONNECTION_REQUEST_TIMEOUT_MILLIS = 500; + public static final int DEFAULT_MAX_CONN_PER_ROUTE = 10; + public static final int DEFAULT_MAX_CONN_TOTAL = 30; private static final Header[] EMPTY_HEADERS = new Header[0]; @@ -408,21 +569,7 @@ public final class RestClient implements Closeable { } /** - * Sets the maximum timeout (in milliseconds) to honour in case of multiple retries of the same request. - * {@link #DEFAULT_MAX_RETRY_TIMEOUT_MILLIS} if not specified. - * - * @throws IllegalArgumentException if maxRetryTimeoutMillis is not greater than 0 - */ - public Builder setMaxRetryTimeoutMillis(int maxRetryTimeoutMillis) { - if (maxRetryTimeoutMillis <= 0) { - throw new IllegalArgumentException("maxRetryTimeoutMillis must be greater than 0"); - } - this.maxRetryTimeout = maxRetryTimeoutMillis; - return this; - } - - /** - * Sets the default request headers, to be used sent with every request unless overridden on a per request basis + * Sets the default request headers, which will be sent along with each request */ public Builder setDefaultHeaders(Header[] defaultHeaders) { Objects.requireNonNull(defaultHeaders, "defaultHeaders must not be null"); @@ -442,6 +589,20 @@ public final class RestClient implements Closeable { return this; } + /** + * Sets the maximum timeout (in milliseconds) to honour in case of multiple retries of the same request. + * {@link #DEFAULT_MAX_RETRY_TIMEOUT_MILLIS} if not specified. 
+ * + * @throws IllegalArgumentException if maxRetryTimeoutMillis is not greater than 0 + */ + public Builder setMaxRetryTimeoutMillis(int maxRetryTimeoutMillis) { + if (maxRetryTimeoutMillis <= 0) { + throw new IllegalArgumentException("maxRetryTimeoutMillis must be greater than 0"); + } + this.maxRetryTimeout = maxRetryTimeoutMillis; + return this; + } + /** * Sets the {@link HttpClientConfigCallback} to be used to customize http client configuration */ @@ -467,29 +628,23 @@ public final class RestClient implements Closeable { if (failureListener == null) { failureListener = new FailureListener(); } - CloseableHttpClient httpClient = createHttpClient(); + CloseableHttpAsyncClient httpClient = createHttpClient(); + httpClient.start(); return new RestClient(httpClient, maxRetryTimeout, defaultHeaders, hosts, failureListener); } - private CloseableHttpClient createHttpClient() { + private CloseableHttpAsyncClient createHttpClient() { //default timeouts are all infinite RequestConfig.Builder requestConfigBuilder = RequestConfig.custom().setConnectTimeout(DEFAULT_CONNECT_TIMEOUT_MILLIS) .setSocketTimeout(DEFAULT_SOCKET_TIMEOUT_MILLIS) .setConnectionRequestTimeout(DEFAULT_CONNECTION_REQUEST_TIMEOUT_MILLIS); - if (requestConfigCallback != null) { requestConfigCallback.customizeRequestConfig(requestConfigBuilder); } - RequestConfig requestConfig = requestConfigBuilder.build(); - - PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(); - //default settings may be too constraining - connectionManager.setDefaultMaxPerRoute(10); - connectionManager.setMaxTotal(30); - - HttpClientBuilder httpClientBuilder = HttpClientBuilder.create().setConnectionManager(connectionManager) - .setDefaultRequestConfig(requestConfig); + HttpAsyncClientBuilder httpClientBuilder = HttpAsyncClientBuilder.create().setDefaultRequestConfig(requestConfigBuilder.build()) + //default settings for connection pooling may be too constraining + 
.setMaxConnPerRoute(DEFAULT_MAX_CONN_PER_ROUTE).setMaxConnTotal(DEFAULT_MAX_CONN_TOTAL); if (httpClientConfigCallback != null) { httpClientConfigCallback.customizeHttpClient(httpClientBuilder); } @@ -517,12 +672,12 @@ public final class RestClient implements Closeable { */ public interface HttpClientConfigCallback { /** - * Allows to customize the {@link CloseableHttpClient} being created and used by the {@link RestClient}. - * It is common to customzie the default {@link org.apache.http.client.CredentialsProvider} through this method, - * without losing any other useful default value that the {@link RestClient.Builder} internally sets. - * Also useful to setup ssl through {@link SSLSocketFactoryHttpConfigCallback}. + * Allows to customize the {@link CloseableHttpAsyncClient} being created and used by the {@link RestClient}. + * Commonly used to customize the default {@link org.apache.http.client.CredentialsProvider} for authentication + * or the {@link SchemeIOSessionStrategy} for communication through ssl without losing any other useful default + * value that the {@link RestClient.Builder} internally sets, like connection pooling. 
*/ - void customizeHttpClient(HttpClientBuilder httpClientBuilder); + void customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder); } /** @@ -533,7 +688,7 @@ public final class RestClient implements Closeable { /** * Notifies that the host provided as argument has just failed */ - public void onFailure(HttpHost host) throws IOException { + public void onFailure(HttpHost host) { } } diff --git a/client/rest/src/main/java/org/elasticsearch/client/SSLSocketFactoryHttpConfigCallback.java b/client/rest/src/main/java/org/elasticsearch/client/SSLSocketFactoryHttpConfigCallback.java deleted file mode 100644 index 3f18f9938c1..00000000000 --- a/client/rest/src/main/java/org/elasticsearch/client/SSLSocketFactoryHttpConfigCallback.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.client; - -import org.apache.http.config.Registry; -import org.apache.http.config.RegistryBuilder; -import org.apache.http.conn.socket.ConnectionSocketFactory; -import org.apache.http.conn.socket.PlainConnectionSocketFactory; -import org.apache.http.conn.ssl.SSLConnectionSocketFactory; -import org.apache.http.impl.client.HttpClientBuilder; -import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; - -/** - * Helps configuring the http client when needing to communicate over ssl. It effectively replaces the connection manager - * with one that has ssl properly configured thanks to the provided {@link SSLConnectionSocketFactory}. - */ -public class SSLSocketFactoryHttpConfigCallback implements RestClient.HttpClientConfigCallback { - - private final SSLConnectionSocketFactory sslSocketFactory; - - public SSLSocketFactoryHttpConfigCallback(SSLConnectionSocketFactory sslSocketFactory) { - this.sslSocketFactory = sslSocketFactory; - } - - @Override - public void customizeHttpClient(HttpClientBuilder httpClientBuilder) { - Registry socketFactoryRegistry = RegistryBuilder.create() - .register("http", PlainConnectionSocketFactory.getSocketFactory()) - .register("https", sslSocketFactory).build(); - PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(socketFactoryRegistry); - //default settings may be too constraining - connectionManager.setDefaultMaxPerRoute(10); - connectionManager.setMaxTotal(30); - httpClientBuilder.setConnectionManager(connectionManager); - } -} diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderTests.java index f032933db24..dc1c03b06a3 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderTests.java @@ -23,7 +23,7 @@ import 
com.carrotsearch.randomizedtesting.generators.RandomInts; import org.apache.http.Header; import org.apache.http.HttpHost; import org.apache.http.client.config.RequestConfig; -import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; import org.apache.http.message.BasicHeader; import java.io.IOException; @@ -108,7 +108,7 @@ public class RestClientBuilderTests extends RestClientTestCase { if (getRandom().nextBoolean()) { builder.setHttpClientConfigCallback(new RestClient.HttpClientConfigCallback() { @Override - public void customizeHttpClient(HttpClientBuilder httpClientBuilder) { + public void customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) { } }); } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java index 4a14c174353..b1f9b66557b 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java @@ -27,6 +27,7 @@ import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; import org.apache.http.Consts; import org.apache.http.Header; +import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.entity.StringEntity; import org.apache.http.message.BasicHeader; @@ -143,7 +144,7 @@ public class RestClientIntegTests extends RestClientTestCase { public void testHeaders() throws Exception { for (String method : getHttpMethods()) { Set standardHeaders = new HashSet<>( - Arrays.asList("Accept-encoding", "Connection", "Host", "User-agent", "Date")); + Arrays.asList("Connection", "Host", "User-agent", "Date")); if (method.equals("HEAD") == false) { standardHeaders.add("Content-length"); } @@ -162,9 +163,9 @@ public class RestClientIntegTests extends RestClientTestCase { int statusCode = randomStatusCode(getRandom()); Response 
esResponse; - try (Response response = restClient.performRequest(method, "/" + statusCode, - Collections.emptyMap(), null, headers)) { - esResponse = response; + try { + esResponse = restClient.performRequest(method, "/" + statusCode, Collections.emptyMap(), + (HttpEntity)null, headers); } catch(ResponseException e) { esResponse = e.getResponse(); } @@ -204,18 +205,14 @@ public class RestClientIntegTests extends RestClientTestCase { private void bodyTest(String method) throws Exception { String requestBody = "{ \"field\": \"value\" }"; StringEntity entity = new StringEntity(requestBody); - Response esResponse; - String responseBody; int statusCode = randomStatusCode(getRandom()); - try (Response response = restClient.performRequest(method, "/" + statusCode, - Collections.emptyMap(), entity)) { - responseBody = EntityUtils.toString(response.getEntity()); - esResponse = response; + Response esResponse; + try { + esResponse = restClient.performRequest(method, "/" + statusCode, Collections.emptyMap(), entity); } catch(ResponseException e) { - responseBody = e.getResponseBody(); esResponse = e.getResponse(); } assertEquals(statusCode, esResponse.getStatusLine().getStatusCode()); - assertEquals(requestBody, responseBody); + assertEquals(requestBody, EntityUtils.toString(esResponse.getEntity())); } } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java index 1a469f2e2ca..957802634bc 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java @@ -22,14 +22,17 @@ package org.elasticsearch.client; import com.carrotsearch.randomizedtesting.generators.RandomInts; import org.apache.http.Header; import org.apache.http.HttpHost; -import org.apache.http.HttpRequest; +import org.apache.http.HttpResponse; import 
org.apache.http.ProtocolVersion; import org.apache.http.StatusLine; -import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpUriRequest; +import org.apache.http.concurrent.FutureCallback; import org.apache.http.conn.ConnectTimeoutException; -import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; +import org.apache.http.message.BasicHttpResponse; import org.apache.http.message.BasicStatusLine; +import org.apache.http.nio.protocol.HttpAsyncRequestProducer; +import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; import org.junit.Before; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -39,6 +42,7 @@ import java.net.SocketTimeoutException; import java.util.Collections; import java.util.HashSet; import java.util.Set; +import java.util.concurrent.Future; import static org.elasticsearch.client.RestClientTestUtil.randomErrorNoRetryStatusCode; import static org.elasticsearch.client.RestClientTestUtil.randomErrorRetryStatusCode; @@ -65,27 +69,33 @@ public class RestClientMultipleHostsTests extends RestClientTestCase { private TrackingFailureListener failureListener; @Before + @SuppressWarnings("unchecked") public void createRestClient() throws IOException { - CloseableHttpClient httpClient = mock(CloseableHttpClient.class); - when(httpClient.execute(any(HttpHost.class), any(HttpRequest.class))).thenAnswer(new Answer() { + CloseableHttpAsyncClient httpClient = mock(CloseableHttpAsyncClient.class); + when(httpClient.execute(any(HttpAsyncRequestProducer.class), any(HttpAsyncResponseConsumer.class), + any(FutureCallback.class))).thenAnswer(new Answer>() { @Override - public CloseableHttpResponse answer(InvocationOnMock invocationOnMock) throws Throwable { - HttpHost httpHost = (HttpHost) invocationOnMock.getArguments()[0]; - HttpUriRequest request = (HttpUriRequest) invocationOnMock.getArguments()[1]; + public Future 
answer(InvocationOnMock invocationOnMock) throws Throwable { + HttpAsyncRequestProducer requestProducer = (HttpAsyncRequestProducer) invocationOnMock.getArguments()[0]; + HttpUriRequest request = (HttpUriRequest)requestProducer.generateRequest(); + HttpHost httpHost = requestProducer.getTarget(); + @SuppressWarnings("unchecked") + FutureCallback futureCallback = (FutureCallback) invocationOnMock.getArguments()[2]; //return the desired status code or exception depending on the path if (request.getURI().getPath().equals("/soe")) { - throw new SocketTimeoutException(httpHost.toString()); + futureCallback.failed(new SocketTimeoutException(httpHost.toString())); } else if (request.getURI().getPath().equals("/coe")) { - throw new ConnectTimeoutException(httpHost.toString()); + futureCallback.failed(new ConnectTimeoutException(httpHost.toString())); } else if (request.getURI().getPath().equals("/ioe")) { - throw new IOException(httpHost.toString()); + futureCallback.failed(new IOException(httpHost.toString())); + } else { + int statusCode = Integer.parseInt(request.getURI().getPath().substring(1)); + StatusLine statusLine = new BasicStatusLine(new ProtocolVersion("http", 1, 1), statusCode, ""); + futureCallback.completed(new BasicHttpResponse(statusLine)); } - int statusCode = Integer.parseInt(request.getURI().getPath().substring(1)); - StatusLine statusLine = new BasicStatusLine(new ProtocolVersion("http", 1, 1), statusCode, ""); - return new CloseableBasicHttpResponse(statusLine); + return null; } }); - int numHosts = RandomInts.randomIntBetween(getRandom(), 2, 5); httpHosts = new HttpHost[numHosts]; for (int i = 0; i < numHosts; i++) { @@ -102,10 +112,9 @@ public class RestClientMultipleHostsTests extends RestClientTestCase { Collections.addAll(hostsSet, httpHosts); for (int j = 0; j < httpHosts.length; j++) { int statusCode = randomOkStatusCode(getRandom()); - try (Response response = restClient.performRequest(randomHttpMethod(getRandom()), "/" + statusCode)) { - 
assertThat(response.getStatusLine().getStatusCode(), equalTo(statusCode)); - assertTrue("host not found: " + response.getHost(), hostsSet.remove(response.getHost())); - } + Response response = restClient.performRequest(randomHttpMethod(getRandom()), "/" + statusCode); + assertEquals(statusCode, response.getStatusLine().getStatusCode()); + assertTrue("host not found: " + response.getHost(), hostsSet.remove(response.getHost())); } assertEquals("every host should have been used but some weren't: " + hostsSet, 0, hostsSet.size()); } @@ -120,11 +129,12 @@ public class RestClientMultipleHostsTests extends RestClientTestCase { for (int j = 0; j < httpHosts.length; j++) { String method = randomHttpMethod(getRandom()); int statusCode = randomErrorNoRetryStatusCode(getRandom()); - try (Response response = restClient.performRequest(method, "/" + statusCode)) { + try { + Response response = restClient.performRequest(method, "/" + statusCode); if (method.equals("HEAD") && statusCode == 404) { //no exception gets thrown although we got a 404 - assertThat(response.getStatusLine().getStatusCode(), equalTo(404)); - assertThat(response.getStatusLine().getStatusCode(), equalTo(statusCode)); + assertEquals(404, response.getStatusLine().getStatusCode()); + assertEquals(statusCode, response.getStatusLine().getStatusCode()); assertTrue("host not found: " + response.getHost(), hostsSet.remove(response.getHost())); } else { fail("request should have failed"); @@ -134,7 +144,7 @@ public class RestClientMultipleHostsTests extends RestClientTestCase { throw e; } Response response = e.getResponse(); - assertThat(response.getStatusLine().getStatusCode(), equalTo(statusCode)); + assertEquals(statusCode, response.getStatusLine().getStatusCode()); assertTrue("host not found: " + response.getHost(), hostsSet.remove(response.getHost())); assertEquals(0, e.getSuppressed().length); } @@ -156,7 +166,7 @@ public class RestClientMultipleHostsTests extends RestClientTestCase { 
failureListener.assertCalled(httpHosts); do { Response response = e.getResponse(); - assertThat(response.getStatusLine().getStatusCode(), equalTo(Integer.parseInt(retryEndpoint.substring(1)))); + assertEquals(Integer.parseInt(retryEndpoint.substring(1)), response.getStatusLine().getStatusCode()); assertTrue("host [" + response.getHost() + "] not found, most likely used multiple times", hostsSet.remove(response.getHost())); if (e.getSuppressed().length > 0) { @@ -223,8 +233,8 @@ public class RestClientMultipleHostsTests extends RestClientTestCase { for (int y = 0; y < iters; y++) { int statusCode = randomErrorNoRetryStatusCode(getRandom()); Response response; - try (Response esResponse = restClient.performRequest(randomHttpMethod(getRandom()), "/" + statusCode)) { - response = esResponse; + try { + response = restClient.performRequest(randomHttpMethod(getRandom()), "/" + statusCode); } catch(ResponseException e) { response = e.getResponse(); diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java index e7c9de934b6..403b86753c7 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java @@ -26,9 +26,9 @@ import org.apache.http.HttpEntity; import org.apache.http.HttpEntityEnclosingRequest; import org.apache.http.HttpHost; import org.apache.http.HttpRequest; +import org.apache.http.HttpResponse; import org.apache.http.ProtocolVersion; import org.apache.http.StatusLine; -import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpHead; import org.apache.http.client.methods.HttpOptions; import org.apache.http.client.methods.HttpPatch; @@ -37,11 +37,15 @@ import org.apache.http.client.methods.HttpPut; import org.apache.http.client.methods.HttpTrace; import 
org.apache.http.client.methods.HttpUriRequest; import org.apache.http.client.utils.URIBuilder; +import org.apache.http.concurrent.FutureCallback; import org.apache.http.conn.ConnectTimeoutException; import org.apache.http.entity.StringEntity; -import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; import org.apache.http.message.BasicHeader; +import org.apache.http.message.BasicHttpResponse; import org.apache.http.message.BasicStatusLine; +import org.apache.http.nio.protocol.HttpAsyncRequestProducer; +import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; import org.apache.http.util.EntityUtils; import org.junit.Before; import org.mockito.ArgumentCaptor; @@ -51,11 +55,11 @@ import org.mockito.stubbing.Answer; import java.io.IOException; import java.net.SocketTimeoutException; import java.net.URI; -import java.net.URISyntaxException; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.concurrent.Future; import static org.elasticsearch.client.RestClientTestUtil.getAllErrorStatusCodes; import static org.elasticsearch.client.RestClientTestUtil.getHttpMethods; @@ -86,39 +90,49 @@ public class RestClientSingleHostTests extends RestClientTestCase { private RestClient restClient; private Header[] defaultHeaders; private HttpHost httpHost; - private CloseableHttpClient httpClient; + private CloseableHttpAsyncClient httpClient; private TrackingFailureListener failureListener; @Before + @SuppressWarnings("unchecked") public void createRestClient() throws IOException { - httpClient = mock(CloseableHttpClient.class); - when(httpClient.execute(any(HttpHost.class), any(HttpRequest.class))).thenAnswer(new Answer() { - @Override - public CloseableHttpResponse answer(InvocationOnMock invocationOnMock) throws Throwable { - HttpUriRequest request = (HttpUriRequest) invocationOnMock.getArguments()[1]; - //return the desired status code or 
exception depending on the path - if (request.getURI().getPath().equals("/soe")) { - throw new SocketTimeoutException(); - } else if (request.getURI().getPath().equals("/coe")) { - throw new ConnectTimeoutException(); - } - int statusCode = Integer.parseInt(request.getURI().getPath().substring(1)); - StatusLine statusLine = new BasicStatusLine(new ProtocolVersion("http", 1, 1), statusCode, ""); + httpClient = mock(CloseableHttpAsyncClient.class); + when(httpClient.execute(any(HttpAsyncRequestProducer.class), any(HttpAsyncResponseConsumer.class), + any(FutureCallback.class))).thenAnswer(new Answer>() { + @Override + public Future answer(InvocationOnMock invocationOnMock) throws Throwable { + HttpAsyncRequestProducer requestProducer = (HttpAsyncRequestProducer) invocationOnMock.getArguments()[0]; + @SuppressWarnings("unchecked") + FutureCallback futureCallback = (FutureCallback) invocationOnMock.getArguments()[2]; + HttpUriRequest request = (HttpUriRequest)requestProducer.generateRequest(); + //return the desired status code or exception depending on the path + if (request.getURI().getPath().equals("/soe")) { + futureCallback.failed(new SocketTimeoutException()); + } else if (request.getURI().getPath().equals("/coe")) { + futureCallback.failed(new ConnectTimeoutException()); + } else { + int statusCode = Integer.parseInt(request.getURI().getPath().substring(1)); + StatusLine statusLine = new BasicStatusLine(new ProtocolVersion("http", 1, 1), statusCode, ""); - CloseableHttpResponse httpResponse = new CloseableBasicHttpResponse(statusLine); - //return the same body that was sent - if (request instanceof HttpEntityEnclosingRequest) { - HttpEntity entity = ((HttpEntityEnclosingRequest) request).getEntity(); - if (entity != null) { - assertTrue("the entity is not repeatable, cannot set it to the response directly", entity.isRepeatable()); - httpResponse.setEntity(entity); + HttpResponse httpResponse = new BasicHttpResponse(statusLine); + //return the same body that was 
sent + if (request instanceof HttpEntityEnclosingRequest) { + HttpEntity entity = ((HttpEntityEnclosingRequest) request).getEntity(); + if (entity != null) { + assertTrue("the entity is not repeatable, cannot set it to the response directly", + entity.isRepeatable()); + httpResponse.setEntity(entity); + } + } + //return the same headers that were sent + httpResponse.setHeaders(request.getAllHeaders()); + futureCallback.completed(httpResponse); + } + return null; } - } - //return the same headers that were sent - httpResponse.setHeaders(request.getAllHeaders()); - return httpResponse; - } - }); + }); + + int numHeaders = RandomInts.randomIntBetween(getRandom(), 0, 3); defaultHeaders = new Header[numHeaders]; for (int i = 0; i < numHeaders; i++) { @@ -134,13 +148,15 @@ public class RestClientSingleHostTests extends RestClientTestCase { /** * Verifies the content of the {@link HttpRequest} that's internally created and passed through to the http client */ + @SuppressWarnings("unchecked") public void testInternalHttpRequest() throws Exception { - ArgumentCaptor requestArgumentCaptor = ArgumentCaptor.forClass(HttpUriRequest.class); + ArgumentCaptor requestArgumentCaptor = ArgumentCaptor.forClass(HttpAsyncRequestProducer.class); int times = 0; for (String httpMethod : getHttpMethods()) { HttpUriRequest expectedRequest = performRandomRequest(httpMethod); - verify(httpClient, times(++times)).execute(any(HttpHost.class), requestArgumentCaptor.capture()); - HttpUriRequest actualRequest = requestArgumentCaptor.getValue(); + verify(httpClient, times(++times)).execute(requestArgumentCaptor.capture(), + any(HttpAsyncResponseConsumer.class), any(FutureCallback.class)); + HttpUriRequest actualRequest = (HttpUriRequest)requestArgumentCaptor.getValue().generateRequest(); assertEquals(expectedRequest.getURI(), actualRequest.getURI()); assertEquals(expectedRequest.getClass(), actualRequest.getClass()); assertArrayEquals(expectedRequest.getAllHeaders(), actualRequest.getAllHeaders()); 
@@ -201,7 +217,8 @@ public class RestClientSingleHostTests extends RestClientTestCase { for (String method : getHttpMethods()) { //error status codes should cause an exception to be thrown for (int errorStatusCode : getAllErrorStatusCodes()) { - try (Response response = performRequest(method, "/" + errorStatusCode)) { + try { + Response response = performRequest(method, "/" + errorStatusCode); if (method.equals("HEAD") && errorStatusCode == 404) { //no exception gets thrown although we got a 404 assertThat(response.getStatusLine().getStatusCode(), equalTo(errorStatusCode)); @@ -223,7 +240,7 @@ public class RestClientSingleHostTests extends RestClientTestCase { } } - public void testIOExceptions() throws IOException { + public void testIOExceptions() throws Exception { for (String method : getHttpMethods()) { //IOExceptions should be let bubble up try { @@ -252,11 +269,9 @@ public class RestClientSingleHostTests extends RestClientTestCase { StringEntity entity = new StringEntity(body); for (String method : Arrays.asList("DELETE", "GET", "PATCH", "POST", "PUT")) { for (int okStatusCode : getOkStatusCodes()) { - try (Response response = restClient.performRequest(method, "/" + okStatusCode, - Collections.emptyMap(), entity)) { - assertThat(response.getStatusLine().getStatusCode(), equalTo(okStatusCode)); - assertThat(EntityUtils.toString(response.getEntity()), equalTo(body)); - } + Response response = restClient.performRequest(method, "/" + okStatusCode, Collections.emptyMap(), entity); + assertThat(response.getStatusLine().getStatusCode(), equalTo(okStatusCode)); + assertThat(EntityUtils.toString(response.getEntity()), equalTo(body)); } for (int errorStatusCode : getAllErrorStatusCodes()) { try { @@ -334,9 +349,8 @@ public class RestClientSingleHostTests extends RestClientTestCase { int statusCode = randomStatusCode(getRandom()); Response esResponse; - try (Response response = restClient.performRequest(method, "/" + statusCode, - Collections.emptyMap(), null, 
headers)) { - esResponse = response; + try { + esResponse = restClient.performRequest(method, "/" + statusCode, headers); } catch(ResponseException e) { esResponse = e.getResponse(); } @@ -349,7 +363,7 @@ public class RestClientSingleHostTests extends RestClientTestCase { } } - private HttpUriRequest performRandomRequest(String method) throws IOException, URISyntaxException { + private HttpUriRequest performRandomRequest(String method) throws Exception { String uriAsString = "/" + randomStatusCode(getRandom()); URIBuilder uriBuilder = new URIBuilder(uriAsString); Map params = Collections.emptyMap(); @@ -434,14 +448,14 @@ public class RestClientSingleHostTests extends RestClientTestCase { return request; } - private Response performRequest(String method, String endpoint, Header... headers) throws IOException { + private Response performRequest(String method, String endpoint, Header... headers) throws Exception { switch(randomIntBetween(0, 2)) { case 0: return restClient.performRequest(method, endpoint, headers); case 1: return restClient.performRequest(method, endpoint, Collections.emptyMap(), headers); case 2: - return restClient.performRequest(method, endpoint, Collections.emptyMap(), null, headers); + return restClient.performRequest(method, endpoint, Collections.emptyMap(), (HttpEntity)null, headers); default: throw new UnsupportedOperationException(); } diff --git a/client/rest/src/test/java/org/elasticsearch/client/TrackingFailureListener.java b/client/rest/src/test/java/org/elasticsearch/client/TrackingFailureListener.java index 35842823923..92033b72cef 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/TrackingFailureListener.java +++ b/client/rest/src/test/java/org/elasticsearch/client/TrackingFailureListener.java @@ -21,7 +21,6 @@ package org.elasticsearch.client; import org.apache.http.HttpHost; -import java.io.IOException; import java.util.HashSet; import java.util.Set; @@ -33,10 +32,10 @@ import static org.junit.Assert.assertThat; * {@link 
org.elasticsearch.client.RestClient.FailureListener} impl that allows to track when it gets called */ class TrackingFailureListener extends RestClient.FailureListener { - private Set hosts = new HashSet<>(); + private volatile Set hosts = new HashSet<>(); @Override - public void onFailure(HttpHost host) throws IOException { + public void onFailure(HttpHost host) { hosts.add(host); } diff --git a/client/sniffer/src/main/java/org/elasticsearch/client/sniff/HostsSniffer.java b/client/sniffer/src/main/java/org/elasticsearch/client/sniff/HostsSniffer.java index bfe21f5e7d1..4aae68caef0 100644 --- a/client/sniffer/src/main/java/org/elasticsearch/client/sniff/HostsSniffer.java +++ b/client/sniffer/src/main/java/org/elasticsearch/client/sniff/HostsSniffer.java @@ -61,10 +61,9 @@ public class HostsSniffer { /** * Calls the elasticsearch nodes info api, parses the response and returns all the found http hosts */ - public List sniffHosts() throws IOException { - try (Response response = restClient.performRequest("get", "/_nodes/http", sniffRequestParams)) { - return readHosts(response.getEntity()); - } + public List sniffHosts() throws Exception { + Response response = restClient.performRequest("get", "/_nodes/http", sniffRequestParams); + return readHosts(response.getEntity()); } private List readHosts(HttpEntity entity) throws IOException { diff --git a/client/sniffer/src/main/java/org/elasticsearch/client/sniff/SniffOnFailureListener.java b/client/sniffer/src/main/java/org/elasticsearch/client/sniff/SniffOnFailureListener.java index 76350057141..cbc77351de9 100644 --- a/client/sniffer/src/main/java/org/elasticsearch/client/sniff/SniffOnFailureListener.java +++ b/client/sniffer/src/main/java/org/elasticsearch/client/sniff/SniffOnFailureListener.java @@ -22,7 +22,6 @@ package org.elasticsearch.client.sniff; import org.apache.http.HttpHost; import org.elasticsearch.client.RestClient; -import java.io.IOException; import java.util.Objects; import 
java.util.concurrent.atomic.AtomicBoolean; @@ -55,7 +54,7 @@ public class SniffOnFailureListener extends RestClient.FailureListener { } @Override - public void onFailure(HttpHost host) throws IOException { + public void onFailure(HttpHost host) { if (sniffer == null) { throw new IllegalStateException("sniffer was not set, unable to sniff on failure"); } diff --git a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/HostsSnifferTests.java b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/HostsSnifferTests.java index 6e0c3a728d5..5a9fd4033d1 100644 --- a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/HostsSnifferTests.java +++ b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/HostsSnifferTests.java @@ -43,7 +43,6 @@ import java.io.OutputStream; import java.io.StringWriter; import java.net.InetAddress; import java.net.InetSocketAddress; -import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -86,7 +85,7 @@ public class HostsSnifferTests extends RestClientTestCase { httpServer.stop(0); } - public void testSniffNodes() throws IOException, URISyntaxException { + public void testSniffNodes() throws Exception { HttpHost httpHost = new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort()); try (RestClient restClient = RestClient.builder(httpHost).build()) { HostsSniffer.Builder builder = HostsSniffer.builder(restClient).setSniffRequestTimeoutMillis(sniffRequestTimeout); diff --git a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/SniffOnFailureListenerTests.java b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/SniffOnFailureListenerTests.java index 6a71d72f60e..bbb1de35663 100644 --- a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/SniffOnFailureListenerTests.java +++ b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/SniffOnFailureListenerTests.java @@ -45,16 +45,17 @@ public class 
SniffOnFailureListenerTests extends RestClientTestCase { assertEquals("sniffer must not be null", e.getMessage()); } - RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)).build(); - try (Sniffer sniffer = Sniffer.builder(restClient, new MockHostsSniffer()).build()) { - listener.setSniffer(sniffer); - try { + try (RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)).build()) { + try (Sniffer sniffer = Sniffer.builder(restClient, new MockHostsSniffer()).build()) { listener.setSniffer(sniffer); - fail("should have failed"); - } catch(IllegalStateException e) { - assertEquals("sniffer can only be set once", e.getMessage()); + try { + listener.setSniffer(sniffer); + fail("should have failed"); + } catch(IllegalStateException e) { + assertEquals("sniffer can only be set once", e.getMessage()); + } + listener.onFailure(new HttpHost("localhost", 9200)); } - listener.onFailure(new HttpHost("localhost", 9200)); } } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java index 62dbd59f80a..5a08ab6a2e7 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java @@ -214,10 +214,9 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { threadPool.generic().execute(new AbstractRunnable() { @Override protected void doRun() throws Exception { - try (org.elasticsearch.client.Response response = restClient.performRequest(method, uri, params, entity)) { - InputStream markSupportedInputStream = new BufferedInputStream(response.getEntity().getContent()); - listener.onResponse(markSupportedInputStream); - } + org.elasticsearch.client.Response response = restClient.performRequest(method, uri, params, entity); + 
InputStream markSupportedInputStream = new BufferedInputStream(response.getEntity().getContent()); + listener.onResponse(markSupportedInputStream); } @Override diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java index caaa328b1ab..f39abb445d8 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java @@ -220,18 +220,15 @@ public class ContextAndHeaderTransportIT extends HttpSmokeTestCase { public void testThatRelevantHttpHeadersBecomeRequestHeaders() throws Exception { final String IRRELEVANT_HEADER = "SomeIrrelevantHeader"; - - try (Response response = getRestClient().performRequest( - "GET", "/" + queryIndex + "/_search", - new BasicHeader(CUSTOM_HEADER, randomHeaderValue), new BasicHeader(IRRELEVANT_HEADER, randomHeaderValue))) { - assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); - List searchRequests = getRequests(SearchRequest.class); - assertThat(searchRequests, hasSize(greaterThan(0))); - for (RequestAndHeaders requestAndHeaders : searchRequests) { - assertThat(requestAndHeaders.headers.containsKey(CUSTOM_HEADER), is(true)); - // was not specified, thus is not included - assertThat(requestAndHeaders.headers.containsKey(IRRELEVANT_HEADER), is(false)); - } + Response response = getRestClient().performRequest("GET", "/" + queryIndex + "/_search", + new BasicHeader(CUSTOM_HEADER, randomHeaderValue), new BasicHeader(IRRELEVANT_HEADER, randomHeaderValue)); + assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); + List searchRequests = getRequests(SearchRequest.class); + assertThat(searchRequests, hasSize(greaterThan(0))); + for (RequestAndHeaders requestAndHeaders : searchRequests) { + assertThat(requestAndHeaders.headers.containsKey(CUSTOM_HEADER), 
is(true)); + // was not specified, thus is not included + assertThat(requestAndHeaders.headers.containsKey(IRRELEVANT_HEADER), is(false)); } } diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsNotSetIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsNotSetIT.java index 7cc84354f6f..576d5c0db96 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsNotSetIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsNotSetIT.java @@ -28,22 +28,19 @@ import static org.hamcrest.Matchers.nullValue; public class CorsNotSetIT extends HttpSmokeTestCase { - public void testCorsSettingDefaultBehaviourDoesNotReturnAnything() throws Exception { String corsValue = "http://localhost:9200"; - try (Response response = getRestClient().performRequest("GET", "/", - new BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", corsValue))) { - assertThat(response.getStatusLine().getStatusCode(), is(200)); - assertThat(response.getHeader("Access-Control-Allow-Origin"), nullValue()); - assertThat(response.getHeader("Access-Control-Allow-Credentials"), nullValue()); - } + Response response = getRestClient().performRequest("GET", "/", + new BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", corsValue)); + assertThat(response.getStatusLine().getStatusCode(), is(200)); + assertThat(response.getHeader("Access-Control-Allow-Origin"), nullValue()); + assertThat(response.getHeader("Access-Control-Allow-Credentials"), nullValue()); } public void testThatOmittingCorsHeaderDoesNotReturnAnything() throws Exception { - try (Response response = getRestClient().performRequest("GET", "/")) { - assertThat(response.getStatusLine().getStatusCode(), is(200)); - assertThat(response.getHeader("Access-Control-Allow-Origin"), nullValue()); - assertThat(response.getHeader("Access-Control-Allow-Credentials"), nullValue()); - } + Response response = getRestClient().performRequest("GET", "/"); + 
assertThat(response.getStatusLine().getStatusCode(), is(200)); + assertThat(response.getHeader("Access-Control-Allow-Origin"), nullValue()); + assertThat(response.getHeader("Access-Control-Allow-Credentials"), nullValue()); } } diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsRegexIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsRegexIT.java index a7d3f4156df..5bcef4828c9 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsRegexIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsRegexIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; +import org.jboss.netty.handler.codec.http.HttpHeaders; import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS; import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_METHODS; @@ -59,16 +60,15 @@ public class CorsRegexIT extends HttpSmokeTestCase { public void testThatRegularExpressionWorksOnMatch() throws Exception { String corsValue = "http://localhost:9200"; - try (Response response = getRestClient().performRequest("GET", "/", - new BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", corsValue))) { - assertResponseWithOriginheader(response, corsValue); - } + Response response = getRestClient().performRequest("GET", "/", + new BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", corsValue)); + assertResponseWithOriginheader(response, corsValue); + corsValue = "https://localhost:9200"; - try (Response response = getRestClient().performRequest("GET", "/", - new BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", corsValue));) { - assertResponseWithOriginheader(response, corsValue); - assertThat(response.getHeader("Access-Control-Allow-Credentials"), is("true")); - 
} + response = getRestClient().performRequest("GET", "/", + new BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", corsValue)); + assertResponseWithOriginheader(response, corsValue); + assertThat(response.getHeader("Access-Control-Allow-Credentials"), is("true")); } public void testThatRegularExpressionReturnsForbiddenOnNonMatch() throws Exception { @@ -85,27 +85,24 @@ public class CorsRegexIT extends HttpSmokeTestCase { } public void testThatSendingNoOriginHeaderReturnsNoAccessControlHeader() throws Exception { - try (Response response = getRestClient().performRequest("GET", "/", new BasicHeader("User-Agent", "Mozilla Bar"))) { - assertThat(response.getStatusLine().getStatusCode(), is(200)); - assertThat(response.getHeader("Access-Control-Allow-Origin"), nullValue()); - } + Response response = getRestClient().performRequest("GET", "/", new BasicHeader("User-Agent", "Mozilla Bar")); + assertThat(response.getStatusLine().getStatusCode(), is(200)); + assertThat(response.getHeader("Access-Control-Allow-Origin"), nullValue()); } public void testThatRegularExpressionIsNotAppliedWithoutCorrectBrowserOnMatch() throws Exception { - try (Response response = getRestClient().performRequest("GET", "/")) { - assertThat(response.getStatusLine().getStatusCode(), is(200)); - assertThat(response.getHeader("Access-Control-Allow-Origin"), nullValue()); - } + Response response = getRestClient().performRequest("GET", "/"); + assertThat(response.getStatusLine().getStatusCode(), is(200)); + assertThat(response.getHeader("Access-Control-Allow-Origin"), nullValue()); } public void testThatPreFlightRequestWorksOnMatch() throws Exception { String corsValue = "http://localhost:9200"; - try (Response response = getRestClient().performRequest("OPTIONS", "/", + Response response = getRestClient().performRequest("OPTIONS", "/", new BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", corsValue), - new BasicHeader("Access-Control-Request-Method", "GET"));) { - 
assertResponseWithOriginheader(response, corsValue); - assertNotNull(response.getHeader("Access-Control-Allow-Methods")); - } + new BasicHeader(HttpHeaders.Names.ACCESS_CONTROL_REQUEST_METHOD, "GET")); + assertResponseWithOriginheader(response, corsValue); + assertNotNull(response.getHeader("Access-Control-Allow-Methods")); } public void testThatPreFlightRequestReturnsNullOnNonMatch() throws Exception { diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java index f3b5d214fa4..4455eaab258 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java @@ -78,7 +78,7 @@ public class DeprecationHttpIT extends HttpSmokeTestCase { * Attempts to do a scatter/gather request that expects unique responses per sub-request. */ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/19222") - public void testUniqueDeprecationResponsesMergedTogether() throws IOException { + public void testUniqueDeprecationResponsesMergedTogether() throws Exception { final String[] indices = new String[randomIntBetween(2, 5)]; // add at least one document for each index @@ -99,35 +99,31 @@ public class DeprecationHttpIT extends HttpSmokeTestCase { final String commaSeparatedIndices = Stream.of(indices).collect(Collectors.joining(",")); - final String body = - "{\"query\":{\"bool\":{\"filter\":[{\"" + TestDeprecatedQueryBuilder.NAME + "\":{}}]}}}"; + final String body = "{\"query\":{\"bool\":{\"filter\":[{\"" + TestDeprecatedQueryBuilder.NAME + "\":{}}]}}}"; // trigger all index deprecations - try (Response response = getRestClient().performRequest("GET", - "/" + commaSeparatedIndices + "/_search", - Collections.emptyMap(), - new StringEntity(body, RestClient.JSON_CONTENT_TYPE))) { - assertThat(response.getStatusLine().getStatusCode(), 
equalTo(OK.getStatus())); + Response response = getRestClient().performRequest("GET", "/" + commaSeparatedIndices + "/_search", + Collections.emptyMap(), new StringEntity(body, RestClient.JSON_CONTENT_TYPE)); + assertThat(response.getStatusLine().getStatusCode(), equalTo(OK.getStatus())); - final List deprecatedWarnings = getWarningHeaders(response.getHeaders()); - final List> headerMatchers = new ArrayList<>(indices.length); + final List deprecatedWarnings = getWarningHeaders(response.getHeaders()); + final List> headerMatchers = new ArrayList<>(indices.length); - for (String index : indices) { - headerMatchers.add(containsString(LoggerMessageFormat.format("[{}] index", (Object)index))); - } + for (String index : indices) { + headerMatchers.add(containsString(LoggerMessageFormat.format("[{}] index", (Object)index))); + } - assertThat(deprecatedWarnings, hasSize(headerMatchers.size())); - for (Matcher headerMatcher : headerMatchers) { - assertThat(deprecatedWarnings, hasItem(headerMatcher)); - } + assertThat(deprecatedWarnings, hasSize(headerMatchers.size())); + for (Matcher headerMatcher : headerMatchers) { + assertThat(deprecatedWarnings, hasItem(headerMatcher)); } } - public void testDeprecationWarningsAppearInHeaders() throws IOException { + public void testDeprecationWarningsAppearInHeaders() throws Exception { doTestDeprecationWarningsAppearInHeaders(); } - public void testDeprecationHeadersDoNotGetStuck() throws IOException { + public void testDeprecationHeadersDoNotGetStuck() throws Exception { doTestDeprecationWarningsAppearInHeaders(); doTestDeprecationWarningsAppearInHeaders(); if (rarely()) { @@ -140,7 +136,7 @@ public class DeprecationHttpIT extends HttpSmokeTestCase { *

* Re-running this back-to-back helps to ensure that warnings are not being maintained across requests. */ - private void doTestDeprecationWarningsAppearInHeaders() throws IOException { + private void doTestDeprecationWarningsAppearInHeaders() throws Exception { final boolean useDeprecatedField = randomBoolean(); final boolean useNonDeprecatedSetting = randomBoolean(); @@ -159,29 +155,26 @@ public class DeprecationHttpIT extends HttpSmokeTestCase { Collections.shuffle(settings, random()); // trigger all deprecations - try (Response response = getRestClient().performRequest("GET", - "/_test_cluster/deprecated_settings", - Collections.emptyMap(), - buildSettingsRequest(settings, useDeprecatedField))) { - assertThat(response.getStatusLine().getStatusCode(), equalTo(OK.getStatus())); + Response response = getRestClient().performRequest("GET", "/_test_cluster/deprecated_settings", + Collections.emptyMap(), buildSettingsRequest(settings, useDeprecatedField)); + assertThat(response.getStatusLine().getStatusCode(), equalTo(OK.getStatus())); - final List deprecatedWarnings = getWarningHeaders(response.getHeaders()); - final List> headerMatchers = new ArrayList<>(4); + final List deprecatedWarnings = getWarningHeaders(response.getHeaders()); + final List> headerMatchers = new ArrayList<>(4); - headerMatchers.add(equalTo(TestDeprecationHeaderRestAction.DEPRECATED_ENDPOINT)); - if (useDeprecatedField) { - headerMatchers.add(equalTo(TestDeprecationHeaderRestAction.DEPRECATED_USAGE)); - } - for (Setting setting : settings) { - if (setting.isDeprecated()) { - headerMatchers.add(containsString(LoggerMessageFormat.format("[{}] setting was deprecated", (Object)setting.getKey()))); - } + headerMatchers.add(equalTo(TestDeprecationHeaderRestAction.DEPRECATED_ENDPOINT)); + if (useDeprecatedField) { + headerMatchers.add(equalTo(TestDeprecationHeaderRestAction.DEPRECATED_USAGE)); + } + for (Setting setting : settings) { + if (setting.isDeprecated()) { + 
headerMatchers.add(containsString(LoggerMessageFormat.format("[{}] setting was deprecated", (Object)setting.getKey()))); } + } - assertThat(deprecatedWarnings, hasSize(headerMatchers.size())); - for (Matcher headerMatcher : headerMatchers) { - assertThat(deprecatedWarnings, hasItem(headerMatcher)); - } + assertThat(deprecatedWarnings, hasSize(headerMatchers.size())); + for (Matcher headerMatcher : headerMatchers) { + assertThat(deprecatedWarnings, hasItem(headerMatcher)); } } diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsDisabledIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsDisabledIT.java index feca7cd1d5f..5e53599b934 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsDisabledIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsDisabledIT.java @@ -19,11 +19,11 @@ package org.elasticsearch.http; +import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -59,7 +59,7 @@ public class DetailedErrorsDisabledIT extends HttpSmokeTestCase { } catch(ResponseException e) { Response response = e.getResponse(); assertThat(response.getHeader("Content-Type"), is("application/json")); - assertThat(e.getResponseBody(), is("{\"error\":\"error traces in responses are disabled.\"}")); + assertThat(EntityUtils.toString(e.getResponse().getEntity()), is("{\"error\":\"error traces in responses are disabled.\"}")); assertThat(response.getStatusLine().getStatusCode(), is(400)); } } diff --git 
a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsEnabledIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsEnabledIT.java index daabb1bc70d..fb26e59a1a5 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsEnabledIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsEnabledIT.java @@ -19,13 +19,10 @@ package org.elasticsearch.http; +import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; -import org.elasticsearch.common.network.NetworkModule; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.elasticsearch.test.ESIntegTestCase.Scope; import java.util.Collections; @@ -44,7 +41,8 @@ public class DetailedErrorsEnabledIT extends HttpSmokeTestCase { } catch(ResponseException e) { Response response = e.getResponse(); assertThat(response.getHeader("Content-Type"), containsString("application/json")); - assertThat(e.getResponseBody(), containsString("\"stack_trace\":\"[Validation Failed: 1: index / indices is missing;]; " + + assertThat(EntityUtils.toString(response.getEntity()), + containsString("\"stack_trace\":\"[Validation Failed: 1: index / indices is missing;]; " + "nested: ActionRequestValidationException[Validation Failed: 1:")); } @@ -54,7 +52,8 @@ public class DetailedErrorsEnabledIT extends HttpSmokeTestCase { } catch(ResponseException e) { Response response = e.getResponse(); assertThat(response.getHeader("Content-Type"), containsString("application/json")); - assertThat(e.getResponseBody(), not(containsString("\"stack_trace\":\"[Validation Failed: 1: index / indices is missing;]; " + assertThat(EntityUtils.toString(response.getEntity()), + not(containsString("\"stack_trace\":\"[Validation Failed: 1: index / indices is missing;]; " + "nested: 
ActionRequestValidationException[Validation Failed: 1:"))); } } diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java index f08bb2b4a9e..ca637a78555 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java @@ -18,19 +18,13 @@ */ package org.elasticsearch.http; -import org.apache.http.Header; -import org.apache.http.HttpException; import org.apache.http.HttpHeaders; -import org.apache.http.HttpResponseInterceptor; import org.apache.http.entity.StringEntity; -import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.message.BasicHeader; -import org.apache.http.protocol.HttpContext; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.test.ESIntegTestCase; -import java.io.IOException; import java.util.Collections; public class HttpCompressionIT extends ESIntegTestCase { @@ -50,99 +44,23 @@ public class HttpCompressionIT extends ESIntegTestCase { public void testCompressesResponseIfRequested() throws Exception { ensureGreen(); - // we need to intercept early, otherwise internal logic in HttpClient will just remove the header and we cannot verify it - ContentEncodingHeaderExtractor headerExtractor = new ContentEncodingHeaderExtractor(); - try (RestClient client = createRestClient(new ContentEncodingHeaderExtractorConfigCallback(headerExtractor))) { - try (Response response = client.performRequest("GET", "/", new BasicHeader(HttpHeaders.ACCEPT_ENCODING, GZIP_ENCODING))) { - assertEquals(200, response.getStatusLine().getStatusCode()); - assertTrue(headerExtractor.hasContentEncodingHeader()); - assertEquals(GZIP_ENCODING, headerExtractor.getContentEncodingHeader().getValue()); - } + try (RestClient client = getRestClient()) { + Response response = 
client.performRequest("GET", "/", new BasicHeader(HttpHeaders.ACCEPT_ENCODING, GZIP_ENCODING)); + assertEquals(200, response.getStatusLine().getStatusCode()); + assertEquals(GZIP_ENCODING, response.getHeader(HttpHeaders.CONTENT_ENCODING)); } } public void testUncompressedResponseByDefault() throws Exception { ensureGreen(); - ContentEncodingHeaderExtractor headerExtractor = new ContentEncodingHeaderExtractor(); - try (RestClient client = createRestClient(new NoContentCompressionConfigCallback(headerExtractor))) { - try (Response response = client.performRequest("GET", "/")) { - assertEquals(200, response.getStatusLine().getStatusCode()); - assertFalse(headerExtractor.hasContentEncodingHeader()); - } - } - } + try (RestClient client = getRestClient()) { + Response response = client.performRequest("GET", "/"); + assertEquals(200, response.getStatusLine().getStatusCode()); + assertNull(response.getHeader(HttpHeaders.CONTENT_ENCODING)); - public void testCanInterpretUncompressedRequest() throws Exception { - ensureGreen(); - ContentEncodingHeaderExtractor headerExtractor = new ContentEncodingHeaderExtractor(); - // this disable content compression in both directions (request and response) - try (RestClient client = createRestClient(new NoContentCompressionConfigCallback(headerExtractor))) { - try (Response response = client.performRequest("POST", "/company/employees/1", - Collections.emptyMap(), SAMPLE_DOCUMENT)) { - assertEquals(201, response.getStatusLine().getStatusCode()); - assertFalse(headerExtractor.hasContentEncodingHeader()); - } - } - } - - public void testCanInterpretCompressedRequest() throws Exception { - ensureGreen(); - ContentEncodingHeaderExtractor headerExtractor = new ContentEncodingHeaderExtractor(); - // we don't call #disableContentCompression() hence the client will send the content compressed - try (RestClient client = createRestClient(new ContentEncodingHeaderExtractorConfigCallback(headerExtractor))) { - try (Response response = 
client.performRequest("POST", "/company/employees/2", - Collections.emptyMap(), SAMPLE_DOCUMENT)) { - assertEquals(201, response.getStatusLine().getStatusCode()); - assertEquals(GZIP_ENCODING, headerExtractor.getContentEncodingHeader().getValue()); - } - } - } - - private static class ContentEncodingHeaderExtractor implements HttpResponseInterceptor { - private Header contentEncodingHeader; - - @Override - public void process(org.apache.http.HttpResponse response, HttpContext context) throws HttpException, IOException { - final Header[] headers = response.getHeaders(HttpHeaders.CONTENT_ENCODING); - if (headers.length == 1) { - this.contentEncodingHeader = headers[0]; - } else if (headers.length > 1) { - throw new AssertionError("Expected none or one content encoding header but got " + headers.length + " headers."); - } - } - - public boolean hasContentEncodingHeader() { - return contentEncodingHeader != null; - } - - public Header getContentEncodingHeader() { - return contentEncodingHeader; - } - } - - private static class NoContentCompressionConfigCallback extends ContentEncodingHeaderExtractorConfigCallback { - NoContentCompressionConfigCallback(ContentEncodingHeaderExtractor contentEncodingHeaderExtractor) { - super(contentEncodingHeaderExtractor); - } - - @Override - public void customizeHttpClient(HttpClientBuilder httpClientBuilder) { - super.customizeHttpClient(httpClientBuilder); - httpClientBuilder.disableContentCompression(); - } - } - - private static class ContentEncodingHeaderExtractorConfigCallback implements RestClient.HttpClientConfigCallback { - - private final ContentEncodingHeaderExtractor contentEncodingHeaderExtractor; - - ContentEncodingHeaderExtractorConfigCallback(ContentEncodingHeaderExtractor contentEncodingHeaderExtractor) { - this.contentEncodingHeaderExtractor = contentEncodingHeaderExtractor; - } - - @Override - public void customizeHttpClient(HttpClientBuilder httpClientBuilder) { - 
httpClientBuilder.addInterceptorFirst(contentEncodingHeaderExtractor); + response = client.performRequest("POST", "/company/employees/1", Collections.emptyMap(), SAMPLE_DOCUMENT); + assertEquals(201, response.getStatusLine().getStatusCode()); + assertNull(response.getHeader(HttpHeaders.CONTENT_ENCODING)); } } } diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ResponseHeaderPluginIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ResponseHeaderPluginIT.java index 482edc36702..037549ada06 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ResponseHeaderPluginIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ResponseHeaderPluginIT.java @@ -67,9 +67,8 @@ public class ResponseHeaderPluginIT extends HttpSmokeTestCase { assertThat(response.getHeader("Secret"), equalTo("required")); } - try (Response authResponse = getRestClient().performRequest("GET", "/_protected", new BasicHeader("Secret", "password"))) { - assertThat(authResponse.getStatusLine().getStatusCode(), equalTo(200)); - assertThat(authResponse.getHeader("Secret"), equalTo("granted")); - } + Response authResponse = getRestClient().performRequest("GET", "/_protected", new BasicHeader("Secret", "password")); + assertThat(authResponse.getStatusLine().getStatusCode(), equalTo(200)); + assertThat(authResponse.getHeader("Secret"), equalTo("granted")); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index e8895aa90db..2df619a3a11 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -299,7 +299,7 @@ public abstract class ESRestTestCase extends ESTestCase { * other tests. 
*/ @After - public void logIfThereAreRunningTasks() throws InterruptedException, IOException { + public void logIfThereAreRunningTasks() throws Exception { RestTestResponse tasks = adminExecutionContext.callApi("tasks.list", emptyMap(), emptyList(), emptyMap()); Set runningTasks = runningTasks(tasks); // Ignore the task list API - it doens't count against us @@ -341,7 +341,7 @@ public abstract class ESRestTestCase extends ESTestCase { } @Before - public void reset() throws IOException { + public void reset() throws Exception { // admin context must be available for @After always, regardless of whether the test was blacklisted adminExecutionContext.initClient(clusterUrls, restAdminSettings()); adminExecutionContext.clear(); @@ -378,7 +378,7 @@ public abstract class ESRestTestCase extends ESTestCase { return messageBuilder.toString(); } - public void test() throws IOException { + public void test() throws Exception { //let's check that there is something to run, otherwise there might be a problem with the test section if (testCandidate.getTestSection().getExecutableSections().size() == 0) { throw new IllegalArgumentException("No executable sections loaded for [" + testCandidate.getTestPath() + "]"); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java index d7295e1dca7..cde95ff9812 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java @@ -62,7 +62,7 @@ public class RestTestExecutionContext implements Closeable { * Saves the obtained response in the execution context. 
*/ public RestTestResponse callApi(String apiName, Map params, List> bodies, - Map headers) throws IOException { + Map headers) throws Exception { //makes a copy of the parameters before modifying them for this specific request HashMap requestParams = new HashMap<>(params); for (Map.Entry entry : requestParams.entrySet()) { @@ -79,7 +79,7 @@ public class RestTestExecutionContext implements Closeable { stash.stashValue("body", response.getBody()); return response; } catch(ResponseException e) { - response = new RestTestResponse(e); + response = new RestTestResponse(e.getResponse()); throw e; } } @@ -105,7 +105,7 @@ public class RestTestExecutionContext implements Closeable { } private RestTestResponse callApiInternal(String apiName, Map params, String body, Map headers) - throws IOException { + throws Exception { return restTestClient.callApi(apiName, params, body, headers); } @@ -119,7 +119,7 @@ public class RestTestExecutionContext implements Closeable { /** * Creates the embedded REST client when needed. Needs to be called before each test. 
*/ - public void initClient(URL[] urls, Settings settings) throws IOException { + public void initClient(URL[] urls, Settings settings) throws Exception { if (restTestClient == null) { restTestClient = new RestTestClient(restSpec, settings, urls); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java index c97ca7cd2fd..40fae122a5e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java @@ -22,16 +22,15 @@ import com.carrotsearch.randomizedtesting.RandomizedTest; import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; -import org.apache.http.conn.ssl.SSLConnectionSocketFactory; import org.apache.http.entity.StringEntity; import org.apache.http.message.BasicHeader; +import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.apache.http.ssl.SSLContexts; import org.apache.lucene.util.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.SSLSocketFactoryHttpConfigCallback; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.logging.ESLogger; @@ -83,7 +82,7 @@ public class RestTestClient implements Closeable { private final RestClient restClient; private final Version esVersion; - public RestTestClient(RestSpec restSpec, Settings settings, URL[] urls) throws IOException { + public RestTestClient(RestSpec restSpec, Settings settings, URL[] urls) throws Exception { assert urls.length > 0; this.restSpec = restSpec; this.restClient = createRestClient(urls, settings); @@ -91,7 +90,7 @@ public class RestTestClient implements Closeable { 
logger.info("REST client initialized {}, elasticsearch version: [{}]", urls, esVersion); } - private Version readAndCheckVersion(URL[] urls) throws IOException { + private Version readAndCheckVersion(URL[] urls) throws Exception { RestApi restApi = restApi("info"); assert restApi.getPaths().size() == 1; assert restApi.getMethods().size() == 1; @@ -126,7 +125,7 @@ public class RestTestClient implements Closeable { * Calls an api with the provided parameters and body */ public RestTestResponse callApi(String apiName, Map params, String body, Map headers) - throws IOException { + throws Exception { if ("raw".equals(apiName)) { // Raw requests are bit simpler.... @@ -247,7 +246,7 @@ public class RestTestClient implements Closeable { return new RestTestResponse(response); } catch(ResponseException e) { if (ignores.contains(e.getResponse().getStatusLine().getStatusCode())) { - return new RestTestResponse(e); + return new RestTestResponse(e.getResponse()); } throw e; } @@ -287,8 +286,8 @@ public class RestTestClient implements Closeable { keyStore.load(is, keystorePass.toCharArray()); } SSLContext sslcontext = SSLContexts.custom().loadTrustMaterial(keyStore, null).build(); - SSLConnectionSocketFactory sslConnectionSocketFactory = new SSLConnectionSocketFactory(sslcontext); - builder.setHttpClientConfigCallback(new SSLSocketFactoryHttpConfigCallback(sslConnectionSocketFactory)); + SSLIOSessionStrategy sessionStrategy = new SSLIOSessionStrategy(sslcontext); + builder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder.setSSLStrategy(sessionStrategy)); } catch (KeyStoreException|NoSuchAlgorithmException|KeyManagementException|CertificateException e) { throw new RuntimeException(e); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java index 4644b87b8e7..27e1abdbdba 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java @@ -20,9 +20,7 @@ package org.elasticsearch.test.rest.client; import org.apache.http.client.methods.HttpHead; import org.apache.http.util.EntityUtils; -import org.apache.lucene.util.IOUtils; import org.elasticsearch.client.Response; -import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.test.rest.Stash; @@ -48,8 +46,6 @@ public class RestTestResponse { } catch (IOException e) { EntityUtils.consumeQuietly(response.getEntity()); throw new RuntimeException(e); - } finally { - IOUtils.closeWhileHandlingException(response); } } else { this.body = null; @@ -57,12 +53,6 @@ public class RestTestResponse { parseResponseBody(); } - public RestTestResponse(ResponseException responseException) throws IOException { - this.response = responseException.getResponse(); - this.body = responseException.getResponseBody(); - parseResponseBody(); - } - private void parseResponseBody() throws IOException { if (body != null) { String contentType = response.getHeader("Content-Type"); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java index 2547d6becea..8b242c53986 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java @@ -79,7 +79,7 @@ public class DoSection implements ExecutableSection { } @Override - public void execute(RestTestExecutionContext executionContext) throws IOException { + public void execute(RestTestExecutionContext executionContext) throws Exception { if ("param".equals(catchParam)) { //client should throw validation error before 
sending request @@ -103,7 +103,7 @@ public class DoSection implements ExecutableSection { fail(formatStatusCodeMessage(restTestResponse, catchStatusCode)); } } catch(ResponseException e) { - RestTestResponse restTestResponse = new RestTestResponse(e); + RestTestResponse restTestResponse = new RestTestResponse(e.getResponse()); if (!Strings.hasLength(catchParam)) { fail(formatStatusCodeMessage(restTestResponse, "2xx")); } else if (catches.containsKey(catchParam)) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java index 669d82cdd78..ece972b77f9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java @@ -20,8 +20,6 @@ package org.elasticsearch.test.rest.section; import org.elasticsearch.test.rest.RestTestExecutionContext; -import java.io.IOException; - /** * Represents a test fragment that can be executed (e.g. 
api call, assertion) */ @@ -30,5 +28,5 @@ public interface ExecutableSection { /** * Executes the section passing in the execution context */ - void execute(RestTestExecutionContext executionContext) throws IOException; + void execute(RestTestExecutionContext executionContext) throws Exception; } From bb21009772f5bd47ba45f3cf6d62b7ba334f4bde Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 5 Jul 2016 19:18:01 +0200 Subject: [PATCH 04/93] [TEST] add async entities to the randomization for RequestLoggerTests --- .../client/RequestLoggerTests.java | 22 ++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/client/rest/src/test/java/org/elasticsearch/client/RequestLoggerTests.java b/client/rest/src/test/java/org/elasticsearch/client/RequestLoggerTests.java index 4d3ad75b5e8..f00060a02de 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RequestLoggerTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RequestLoggerTests.java @@ -35,6 +35,8 @@ import org.apache.http.entity.InputStreamEntity; import org.apache.http.entity.StringEntity; import org.apache.http.message.BasicHttpResponse; import org.apache.http.message.BasicStatusLine; +import org.apache.http.nio.entity.NByteArrayEntity; +import org.apache.http.nio.entity.NStringEntity; import org.apache.http.util.EntityUtils; import java.io.ByteArrayInputStream; @@ -97,14 +99,24 @@ public class RequestLoggerTests extends RestClientTestCase { expected += " -d '" + requestBody + "'"; HttpEntityEnclosingRequest enclosingRequest = (HttpEntityEnclosingRequest) request; HttpEntity entity; - if (getRandom().nextBoolean()) { - entity = new StringEntity(requestBody, StandardCharsets.UTF_8); - } else { - entity = new InputStreamEntity(new ByteArrayInputStream(requestBody.getBytes(StandardCharsets.UTF_8))); + switch(RandomInts.randomIntBetween(getRandom(), 0, 3)) { + case 0: + entity = new StringEntity(requestBody, StandardCharsets.UTF_8); + break; + case 1: + entity = new 
InputStreamEntity(new ByteArrayInputStream(requestBody.getBytes(StandardCharsets.UTF_8))); + break; + case 2: + entity = new NStringEntity(requestBody, StandardCharsets.UTF_8); + break; + case 3: + entity = new NByteArrayEntity(requestBody.getBytes(StandardCharsets.UTF_8)); + break; + default: + throw new UnsupportedOperationException(); } enclosingRequest.setEntity(entity); } - String traceRequest = RequestLogger.buildTraceRequest(request, host); assertThat(traceRequest, equalTo(expected)); if (hasBody) { From 41e97a7cb162b6beb45d199e8f056390843add0e Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 12 Jul 2016 18:35:56 +0200 Subject: [PATCH 05/93] RestClient: take builder out to its own class The RestClient class is getting bigger and bigger, its builder can definitely be taken out to its own top level class: RestClientBuilder --- .../org/elasticsearch/client/RestClient.java | 158 +--------------- .../client/RestClientBuilder.java | 175 ++++++++++++++++++ .../client/RestClientBuilderTests.java | 6 +- .../elasticsearch/client/sniff/Sniffer.java | 3 +- .../elasticsearch/test/ESIntegTestCase.java | 7 +- .../test/rest/client/RestTestClient.java | 3 +- 6 files changed, 190 insertions(+), 162 deletions(-) create mode 100644 client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index b5bfd6ee07c..6db51a5ac52 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -27,7 +27,6 @@ import org.apache.http.HttpHost; import org.apache.http.HttpRequest; import org.apache.http.HttpResponse; import org.apache.http.client.ClientProtocolException; -import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.HttpEntityEnclosingRequestBase; import org.apache.http.client.methods.HttpHead; 
import org.apache.http.client.methods.HttpOptions; @@ -39,12 +38,8 @@ import org.apache.http.client.methods.HttpTrace; import org.apache.http.client.utils.URIBuilder; import org.apache.http.concurrent.FutureCallback; import org.apache.http.entity.ContentType; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; -import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; import org.apache.http.nio.client.methods.HttpAsyncMethods; -import org.apache.http.nio.conn.SchemeIOSessionStrategy; import org.apache.http.nio.protocol.HttpAsyncRequestProducer; import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; @@ -70,7 +65,7 @@ import java.util.concurrent.atomic.AtomicInteger; /** * Client that connects to an elasticsearch cluster through http. - * Must be created using {@link Builder}, which allows to set all the different options or just rely on defaults. + * Must be created using {@link RestClientBuilder}, which allows to set all the different options or just rely on defaults. * The hosts that are part of the cluster need to be provided at creation time, but can also be replaced later * by calling {@link #setHosts(HttpHost...)}. * The method {@link #performRequest(String, String, Map, HttpEntity, Header...)} allows to send a request to the cluster. When @@ -509,7 +504,6 @@ public final class RestClient implements Closeable { public void onSuccess(Response response) { this.response = response; latch.countDown(); - } @Override @@ -530,154 +524,10 @@ public final class RestClient implements Closeable { } /** - * Returns a new {@link Builder} to help with {@link RestClient} creation. + * Returns a new {@link RestClientBuilder} to help with {@link RestClient} creation. */ - public static Builder builder(HttpHost... hosts) { - return new Builder(hosts); - } - - /** - * Helps creating a new {@link RestClient}. 
Allows to set the most common http client configuration options when internally - * creating the underlying {@link org.apache.http.nio.client.HttpAsyncClient}. Also allows to provide an externally created - * {@link org.apache.http.nio.client.HttpAsyncClient} in case additional customization is needed. - */ - public static final class Builder { - public static final int DEFAULT_CONNECT_TIMEOUT_MILLIS = 1000; - public static final int DEFAULT_SOCKET_TIMEOUT_MILLIS = 10000; - public static final int DEFAULT_MAX_RETRY_TIMEOUT_MILLIS = DEFAULT_SOCKET_TIMEOUT_MILLIS; - public static final int DEFAULT_CONNECTION_REQUEST_TIMEOUT_MILLIS = 500; - public static final int DEFAULT_MAX_CONN_PER_ROUTE = 10; - public static final int DEFAULT_MAX_CONN_TOTAL = 30; - - private static final Header[] EMPTY_HEADERS = new Header[0]; - - private final HttpHost[] hosts; - private int maxRetryTimeout = DEFAULT_MAX_RETRY_TIMEOUT_MILLIS; - private Header[] defaultHeaders = EMPTY_HEADERS; - private FailureListener failureListener; - private HttpClientConfigCallback httpClientConfigCallback; - private RequestConfigCallback requestConfigCallback; - - /** - * Creates a new builder instance and sets the hosts that the client will send requests to. - */ - private Builder(HttpHost... 
hosts) { - if (hosts == null || hosts.length == 0) { - throw new IllegalArgumentException("no hosts provided"); - } - this.hosts = hosts; - } - - /** - * Sets the default request headers, which will be sent along with each request - */ - public Builder setDefaultHeaders(Header[] defaultHeaders) { - Objects.requireNonNull(defaultHeaders, "defaultHeaders must not be null"); - for (Header defaultHeader : defaultHeaders) { - Objects.requireNonNull(defaultHeader, "default header must not be null"); - } - this.defaultHeaders = defaultHeaders; - return this; - } - - /** - * Sets the {@link FailureListener} to be notified for each request failure - */ - public Builder setFailureListener(FailureListener failureListener) { - Objects.requireNonNull(failureListener, "failureListener must not be null"); - this.failureListener = failureListener; - return this; - } - - /** - * Sets the maximum timeout (in milliseconds) to honour in case of multiple retries of the same request. - * {@link #DEFAULT_MAX_RETRY_TIMEOUT_MILLIS} if not specified. 
- * - * @throws IllegalArgumentException if maxRetryTimeoutMillis is not greater than 0 - */ - public Builder setMaxRetryTimeoutMillis(int maxRetryTimeoutMillis) { - if (maxRetryTimeoutMillis <= 0) { - throw new IllegalArgumentException("maxRetryTimeoutMillis must be greater than 0"); - } - this.maxRetryTimeout = maxRetryTimeoutMillis; - return this; - } - - /** - * Sets the {@link HttpClientConfigCallback} to be used to customize http client configuration - */ - public Builder setHttpClientConfigCallback(HttpClientConfigCallback httpClientConfigCallback) { - Objects.requireNonNull(httpClientConfigCallback, "httpClientConfigCallback must not be null"); - this.httpClientConfigCallback = httpClientConfigCallback; - return this; - } - - /** - * Sets the {@link RequestConfigCallback} to be used to customize http client configuration - */ - public Builder setRequestConfigCallback(RequestConfigCallback requestConfigCallback) { - Objects.requireNonNull(requestConfigCallback, "requestConfigCallback must not be null"); - this.requestConfigCallback = requestConfigCallback; - return this; - } - - /** - * Creates a new {@link RestClient} based on the provided configuration. 
- */ - public RestClient build() { - if (failureListener == null) { - failureListener = new FailureListener(); - } - CloseableHttpAsyncClient httpClient = createHttpClient(); - httpClient.start(); - return new RestClient(httpClient, maxRetryTimeout, defaultHeaders, hosts, failureListener); - } - - private CloseableHttpAsyncClient createHttpClient() { - //default timeouts are all infinite - RequestConfig.Builder requestConfigBuilder = RequestConfig.custom().setConnectTimeout(DEFAULT_CONNECT_TIMEOUT_MILLIS) - .setSocketTimeout(DEFAULT_SOCKET_TIMEOUT_MILLIS) - .setConnectionRequestTimeout(DEFAULT_CONNECTION_REQUEST_TIMEOUT_MILLIS); - if (requestConfigCallback != null) { - requestConfigCallback.customizeRequestConfig(requestConfigBuilder); - } - - HttpAsyncClientBuilder httpClientBuilder = HttpAsyncClientBuilder.create().setDefaultRequestConfig(requestConfigBuilder.build()) - //default settings for connection pooling may be too constraining - .setMaxConnPerRoute(DEFAULT_MAX_CONN_PER_ROUTE).setMaxConnTotal(DEFAULT_MAX_CONN_TOTAL); - if (httpClientConfigCallback != null) { - httpClientConfigCallback.customizeHttpClient(httpClientBuilder); - } - return httpClientBuilder.build(); - } - } - - /** - * Callback used the default {@link RequestConfig} being set to the {@link CloseableHttpClient} - * @see HttpClientBuilder#setDefaultRequestConfig - */ - public interface RequestConfigCallback { - /** - * Allows to customize the {@link RequestConfig} that will be used with each request. - * It is common to customize the different timeout values through this method without losing any other useful default - * value that the {@link RestClient.Builder} internally sets. - */ - void customizeRequestConfig(RequestConfig.Builder requestConfigBuilder); - } - - /** - * Callback used to customize the {@link CloseableHttpClient} instance used by a {@link RestClient} instance. 
- * Allows to customize default {@link RequestConfig} being set to the client and any parameter that - * can be set through {@link HttpClientBuilder} - */ - public interface HttpClientConfigCallback { - /** - * Allows to customize the {@link CloseableHttpAsyncClient} being created and used by the {@link RestClient}. - * Commonly used to customize the default {@link org.apache.http.client.CredentialsProvider} for authentication - * or the {@link SchemeIOSessionStrategy} for communication through ssl without losing any other useful default - * value that the {@link RestClient.Builder} internally sets, like connection pooling. - */ - void customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder); + public static RestClientBuilder builder(HttpHost... hosts) { + return new RestClientBuilder(hosts); } /** diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java b/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java new file mode 100644 index 00000000000..aa761c6e03f --- /dev/null +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java @@ -0,0 +1,175 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import org.apache.http.Header; +import org.apache.http.HttpHost; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; +import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; +import org.apache.http.nio.conn.SchemeIOSessionStrategy; + +import java.util.Objects; + +/** + * Helps creating a new {@link RestClient}. Allows to set the most common http client configuration options when internally + * creating the underlying {@link org.apache.http.nio.client.HttpAsyncClient}. Also allows to provide an externally created + * {@link org.apache.http.nio.client.HttpAsyncClient} in case additional customization is needed. + */ +public final class RestClientBuilder { + public static final int DEFAULT_CONNECT_TIMEOUT_MILLIS = 1000; + public static final int DEFAULT_SOCKET_TIMEOUT_MILLIS = 10000; + public static final int DEFAULT_MAX_RETRY_TIMEOUT_MILLIS = DEFAULT_SOCKET_TIMEOUT_MILLIS; + public static final int DEFAULT_CONNECTION_REQUEST_TIMEOUT_MILLIS = 500; + public static final int DEFAULT_MAX_CONN_PER_ROUTE = 10; + public static final int DEFAULT_MAX_CONN_TOTAL = 30; + + private static final Header[] EMPTY_HEADERS = new Header[0]; + + private final HttpHost[] hosts; + private int maxRetryTimeout = DEFAULT_MAX_RETRY_TIMEOUT_MILLIS; + private Header[] defaultHeaders = EMPTY_HEADERS; + private RestClient.FailureListener failureListener; + private HttpClientConfigCallback httpClientConfigCallback; + private RequestConfigCallback requestConfigCallback; + + /** + * Creates a new builder instance and sets the hosts that the client will send requests to. + */ + RestClientBuilder(HttpHost... 
hosts) { + if (hosts == null || hosts.length == 0) { + throw new IllegalArgumentException("no hosts provided"); + } + this.hosts = hosts; + } + + /** + * Sets the default request headers, which will be sent along with each request + */ + public RestClientBuilder setDefaultHeaders(Header[] defaultHeaders) { + Objects.requireNonNull(defaultHeaders, "defaultHeaders must not be null"); + for (Header defaultHeader : defaultHeaders) { + Objects.requireNonNull(defaultHeader, "default header must not be null"); + } + this.defaultHeaders = defaultHeaders; + return this; + } + + /** + * Sets the {@link RestClient.FailureListener} to be notified for each request failure + */ + public RestClientBuilder setFailureListener(RestClient.FailureListener failureListener) { + Objects.requireNonNull(failureListener, "failureListener must not be null"); + this.failureListener = failureListener; + return this; + } + + /** + * Sets the maximum timeout (in milliseconds) to honour in case of multiple retries of the same request. + * {@link #DEFAULT_MAX_RETRY_TIMEOUT_MILLIS} if not specified. 
+ * + * @throws IllegalArgumentException if maxRetryTimeoutMillis is not greater than 0 + */ + public RestClientBuilder setMaxRetryTimeoutMillis(int maxRetryTimeoutMillis) { + if (maxRetryTimeoutMillis <= 0) { + throw new IllegalArgumentException("maxRetryTimeoutMillis must be greater than 0"); + } + this.maxRetryTimeout = maxRetryTimeoutMillis; + return this; + } + + /** + * Sets the {@link HttpClientConfigCallback} to be used to customize http client configuration + */ + public RestClientBuilder setHttpClientConfigCallback(HttpClientConfigCallback httpClientConfigCallback) { + Objects.requireNonNull(httpClientConfigCallback, "httpClientConfigCallback must not be null"); + this.httpClientConfigCallback = httpClientConfigCallback; + return this; + } + + /** + * Sets the {@link RequestConfigCallback} to be used to customize http client configuration + */ + public RestClientBuilder setRequestConfigCallback(RequestConfigCallback requestConfigCallback) { + Objects.requireNonNull(requestConfigCallback, "requestConfigCallback must not be null"); + this.requestConfigCallback = requestConfigCallback; + return this; + } + + /** + * Creates a new {@link RestClient} based on the provided configuration. 
+ */ + public RestClient build() { + if (failureListener == null) { + failureListener = new RestClient.FailureListener(); + } + CloseableHttpAsyncClient httpClient = createHttpClient(); + httpClient.start(); + return new RestClient(httpClient, maxRetryTimeout, defaultHeaders, hosts, failureListener); + } + + private CloseableHttpAsyncClient createHttpClient() { + //default timeouts are all infinite + RequestConfig.Builder requestConfigBuilder = RequestConfig.custom().setConnectTimeout(DEFAULT_CONNECT_TIMEOUT_MILLIS) + .setSocketTimeout(DEFAULT_SOCKET_TIMEOUT_MILLIS) + .setConnectionRequestTimeout(DEFAULT_CONNECTION_REQUEST_TIMEOUT_MILLIS); + if (requestConfigCallback != null) { + requestConfigCallback.customizeRequestConfig(requestConfigBuilder); + } + + HttpAsyncClientBuilder httpClientBuilder = HttpAsyncClientBuilder.create().setDefaultRequestConfig(requestConfigBuilder.build()) + //default settings for connection pooling may be too constraining + .setMaxConnPerRoute(DEFAULT_MAX_CONN_PER_ROUTE).setMaxConnTotal(DEFAULT_MAX_CONN_TOTAL); + if (httpClientConfigCallback != null) { + httpClientConfigCallback.customizeHttpClient(httpClientBuilder); + } + return httpClientBuilder.build(); + } + + /** + * Callback used the default {@link RequestConfig} being set to the {@link CloseableHttpClient} + * @see HttpClientBuilder#setDefaultRequestConfig + */ + public interface RequestConfigCallback { + /** + * Allows to customize the {@link RequestConfig} that will be used with each request. + * It is common to customize the different timeout values through this method without losing any other useful default + * value that the {@link RestClientBuilder} internally sets. + */ + void customizeRequestConfig(RequestConfig.Builder requestConfigBuilder); + } + + /** + * Callback used to customize the {@link CloseableHttpClient} instance used by a {@link RestClient} instance. 
+ * Allows to customize default {@link RequestConfig} being set to the client and any parameter that + * can be set through {@link HttpClientBuilder} + */ + public interface HttpClientConfigCallback { + /** + * Allows to customize the {@link CloseableHttpAsyncClient} being created and used by the {@link RestClient}. + * Commonly used to customize the default {@link org.apache.http.client.CredentialsProvider} for authentication + * or the {@link SchemeIOSessionStrategy} for communication through ssl without losing any other useful default + * value that the {@link RestClientBuilder} internally sets, like connection pooling. + */ + void customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder); + } +} diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderTests.java index dc1c03b06a3..c36039c1bb1 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderTests.java @@ -104,16 +104,16 @@ public class RestClientBuilderTests extends RestClientTestCase { for (int i = 0; i < numNodes; i++) { hosts[i] = new HttpHost("localhost", 9200 + i); } - RestClient.Builder builder = RestClient.builder(hosts); + RestClientBuilder builder = RestClient.builder(hosts); if (getRandom().nextBoolean()) { - builder.setHttpClientConfigCallback(new RestClient.HttpClientConfigCallback() { + builder.setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() { @Override public void customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) { } }); } if (getRandom().nextBoolean()) { - builder.setRequestConfigCallback(new RestClient.RequestConfigCallback() { + builder.setRequestConfigCallback(new RestClientBuilder.RequestConfigCallback() { @Override public void customizeRequestConfig(RequestConfig.Builder requestConfigBuilder) { } diff --git 
a/client/sniffer/src/main/java/org/elasticsearch/client/sniff/Sniffer.java b/client/sniffer/src/main/java/org/elasticsearch/client/sniff/Sniffer.java index 74a28cdd222..61fa32e7b62 100644 --- a/client/sniffer/src/main/java/org/elasticsearch/client/sniff/Sniffer.java +++ b/client/sniffer/src/main/java/org/elasticsearch/client/sniff/Sniffer.java @@ -23,6 +23,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.http.HttpHost; import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; import java.io.Closeable; import java.io.IOException; @@ -39,7 +40,7 @@ import java.util.concurrent.atomic.AtomicBoolean; * Must be created via {@link Builder}, which allows to set all of the different options or rely on defaults. * A background task fetches the nodes through the {@link HostsSniffer} and sets them to the {@link RestClient} instance. * It is possible to perform sniffing on failure by creating a {@link SniffOnFailureListener} and providing it as an argument to - * {@link org.elasticsearch.client.RestClient.Builder#setFailureListener(RestClient.FailureListener)}. The Sniffer implementation + * {@link RestClientBuilder#setFailureListener(RestClient.FailureListener)}. The Sniffer implementation * needs to be lazily set to the previously created SniffOnFailureListener through {@link SniffOnFailureListener#setSniffer(Sniffer)}. 
*/ public final class Sniffer implements Closeable { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 73edf000b96..9c51bf23bd2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -29,6 +29,7 @@ import org.apache.lucene.util.TestUtil; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.discovery.DiscoveryModule; +import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.transport.MockTcpTransportPlugin; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ShardOperationFailedException; @@ -2094,11 +2095,11 @@ public abstract class ESIntegTestCase extends ESTestCase { return restClient; } - protected static RestClient createRestClient(RestClient.HttpClientConfigCallback httpClientConfigCallback) { + protected static RestClient createRestClient(RestClientBuilder.HttpClientConfigCallback httpClientConfigCallback) { return createRestClient(httpClientConfigCallback, "http"); } - protected static RestClient createRestClient(RestClient.HttpClientConfigCallback httpClientConfigCallback, String protocol) { + protected static RestClient createRestClient(RestClientBuilder.HttpClientConfigCallback httpClientConfigCallback, String protocol) { final NodesInfoResponse nodeInfos = client().admin().cluster().prepareNodesInfo().get(); final List nodes = nodeInfos.getNodes(); assertFalse(nodeInfos.hasFailures()); @@ -2111,7 +2112,7 @@ public abstract class ESIntegTestCase extends ESTestCase { hosts.add(new HttpHost(NetworkAddress.format(address.getAddress()), address.getPort(), protocol)); } } - RestClient.Builder builder = RestClient.builder(hosts.toArray(new HttpHost[hosts.size()])); + RestClientBuilder builder = 
RestClient.builder(hosts.toArray(new HttpHost[hosts.size()])); if (httpClientConfigCallback != null) { builder.setHttpClientConfigCallback(httpClientConfigCallback); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java index 40fae122a5e..aec404f62e7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java @@ -31,6 +31,7 @@ import org.elasticsearch.Version; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.logging.ESLogger; @@ -267,7 +268,7 @@ public class RestTestClient implements Closeable { URL url = urls[i]; hosts[i] = new HttpHost(url.getHost(), url.getPort(), protocol); } - RestClient.Builder builder = RestClient.builder(hosts).setMaxRetryTimeoutMillis(30000) + RestClientBuilder builder = RestClient.builder(hosts).setMaxRetryTimeoutMillis(30000) .setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setSocketTimeout(30000)); String keystorePath = settings.get(TRUSTSTORE_PATH); From e27203534a1f5cd9c47d8645b6d3fc3b37a2ac0f Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 12 Jul 2016 18:49:36 +0200 Subject: [PATCH 06/93] Rest Client: improve listener naming --- .../main/java/org/elasticsearch/client/RestClient.java | 10 +++++----- ...Listener.java => HostsTrackingFailureListener.java} | 4 ++-- .../client/RestClientMultipleHostsTests.java | 4 ++-- .../client/RestClientSingleHostTests.java | 4 ++-- 4 files changed, 11 insertions(+), 11 deletions(-) rename client/rest/src/test/java/org/elasticsearch/client/{TrackingFailureListener.java => 
HostsTrackingFailureListener.java} (92%) diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index 6db51a5ac52..a511ddbaa20 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -248,14 +248,14 @@ public final class RestClient implements Closeable { URI uri = buildUri(endpoint, params); HttpRequestBase request = createHttpRequest(method, uri, entity); setHeaders(request, headers); - FailureTrackingListener failureTrackingListener = new FailureTrackingListener(responseListener); + FailureTrackingResponseListener failureTrackingResponseListener = new FailureTrackingResponseListener(responseListener); long startTime = System.nanoTime(); - performRequest(startTime, nextHost().iterator(), request, responseConsumer, failureTrackingListener); + performRequest(startTime, nextHost().iterator(), request, responseConsumer, failureTrackingResponseListener); } private void performRequest(final long startTime, final Iterator hosts, final HttpRequestBase request, final HttpAsyncResponseConsumer responseConsumer, - final FailureTrackingListener listener) { + final FailureTrackingResponseListener listener) { final HttpHost host = hosts.next(); //we stream the request body if the entity allows for it HttpAsyncRequestProducer requestProducer = HttpAsyncMethods.create(host, request); @@ -473,11 +473,11 @@ public final class RestClient implements Closeable { } } - private static class FailureTrackingListener { + private static class FailureTrackingResponseListener { private final ResponseListener responseListener; private volatile Exception exception; - FailureTrackingListener(ResponseListener responseListener) { + FailureTrackingResponseListener(ResponseListener responseListener) { this.responseListener = responseListener; } diff --git 
a/client/rest/src/test/java/org/elasticsearch/client/TrackingFailureListener.java b/client/rest/src/test/java/org/elasticsearch/client/HostsTrackingFailureListener.java similarity index 92% rename from client/rest/src/test/java/org/elasticsearch/client/TrackingFailureListener.java rename to client/rest/src/test/java/org/elasticsearch/client/HostsTrackingFailureListener.java index 92033b72cef..e2f0ba81f6e 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/TrackingFailureListener.java +++ b/client/rest/src/test/java/org/elasticsearch/client/HostsTrackingFailureListener.java @@ -29,9 +29,9 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; /** - * {@link org.elasticsearch.client.RestClient.FailureListener} impl that allows to track when it gets called + * {@link org.elasticsearch.client.RestClient.FailureListener} impl that allows to track when it gets called for which host. */ -class TrackingFailureListener extends RestClient.FailureListener { +class HostsTrackingFailureListener extends RestClient.FailureListener { private volatile Set hosts = new HashSet<>(); @Override diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java index 957802634bc..89eacd80fa9 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java @@ -66,7 +66,7 @@ public class RestClientMultipleHostsTests extends RestClientTestCase { private RestClient restClient; private HttpHost[] httpHosts; - private TrackingFailureListener failureListener; + private HostsTrackingFailureListener failureListener; @Before @SuppressWarnings("unchecked") @@ -101,7 +101,7 @@ public class RestClientMultipleHostsTests extends RestClientTestCase { for (int i = 0; i < numHosts; i++) { httpHosts[i] = new 
HttpHost("localhost", 9200 + i); } - failureListener = new TrackingFailureListener(); + failureListener = new HostsTrackingFailureListener(); restClient = new RestClient(httpClient, 10000, new Header[0], httpHosts, failureListener); } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java index 403b86753c7..f315592cdff 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java @@ -91,7 +91,7 @@ public class RestClientSingleHostTests extends RestClientTestCase { private Header[] defaultHeaders; private HttpHost httpHost; private CloseableHttpAsyncClient httpClient; - private TrackingFailureListener failureListener; + private HostsTrackingFailureListener failureListener; @Before @SuppressWarnings("unchecked") @@ -141,7 +141,7 @@ public class RestClientSingleHostTests extends RestClientTestCase { defaultHeaders[i] = new BasicHeader(headerName, headerValue); } httpHost = new HttpHost("localhost", 9200); - failureListener = new TrackingFailureListener(); + failureListener = new HostsTrackingFailureListener(); restClient = new RestClient(httpClient, 10000, defaultHeaders, new HttpHost[]{httpHost}, failureListener); } From e5006ed7b54ebcc52b283816d233686c05d8a39f Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 12 Jul 2016 19:49:08 +0200 Subject: [PATCH 07/93] Rest Client: have RestClientBuilder callback also return the same type as their argument HttpClientConfigCallback#customizeHttpClient now also returns the HttpClientBuilder so it can be completely replaced RequestConfigCallback#customizeRequestConfig now also returns the HttpClientBuilder so it can be completely replaced --- .../java/org/elasticsearch/client/RestClientBuilder.java | 8 ++++---- .../org/elasticsearch/client/RestClientBuilderTests.java | 6 ++++-- 2 files 
changed, 8 insertions(+), 6 deletions(-) diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java b/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java index aa761c6e03f..d3ae2e4c2a2 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java @@ -133,14 +133,14 @@ public final class RestClientBuilder { .setSocketTimeout(DEFAULT_SOCKET_TIMEOUT_MILLIS) .setConnectionRequestTimeout(DEFAULT_CONNECTION_REQUEST_TIMEOUT_MILLIS); if (requestConfigCallback != null) { - requestConfigCallback.customizeRequestConfig(requestConfigBuilder); + requestConfigBuilder = requestConfigCallback.customizeRequestConfig(requestConfigBuilder); } HttpAsyncClientBuilder httpClientBuilder = HttpAsyncClientBuilder.create().setDefaultRequestConfig(requestConfigBuilder.build()) //default settings for connection pooling may be too constraining .setMaxConnPerRoute(DEFAULT_MAX_CONN_PER_ROUTE).setMaxConnTotal(DEFAULT_MAX_CONN_TOTAL); if (httpClientConfigCallback != null) { - httpClientConfigCallback.customizeHttpClient(httpClientBuilder); + httpClientBuilder = httpClientConfigCallback.customizeHttpClient(httpClientBuilder); } return httpClientBuilder.build(); } @@ -155,7 +155,7 @@ public final class RestClientBuilder { * It is common to customize the different timeout values through this method without losing any other useful default * value that the {@link RestClientBuilder} internally sets. */ - void customizeRequestConfig(RequestConfig.Builder requestConfigBuilder); + RequestConfig.Builder customizeRequestConfig(RequestConfig.Builder requestConfigBuilder); } /** @@ -170,6 +170,6 @@ public final class RestClientBuilder { * or the {@link SchemeIOSessionStrategy} for communication through ssl without losing any other useful default * value that the {@link RestClientBuilder} internally sets, like connection pooling. 
*/ - void customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder); + HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder); } } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderTests.java index c36039c1bb1..da8e93a59f5 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderTests.java @@ -108,14 +108,16 @@ public class RestClientBuilderTests extends RestClientTestCase { if (getRandom().nextBoolean()) { builder.setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() { @Override - public void customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) { + public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) { + return httpClientBuilder; } }); } if (getRandom().nextBoolean()) { builder.setRequestConfigCallback(new RestClientBuilder.RequestConfigCallback() { @Override - public void customizeRequestConfig(RequestConfig.Builder requestConfigBuilder) { + public RequestConfig.Builder customizeRequestConfig(RequestConfig.Builder requestConfigBuilder) { + return requestConfigBuilder; } }); } From 283090e2aee3ec88b24b48f580f31f6c44ecdf61 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 12 Jul 2016 20:09:23 +0200 Subject: [PATCH 08/93] add check for null hosts in RestClientBuilder, so it fails early Also delayed call to HttpAsyncClient#start so that if something goes wrong while creating the RestClient, the http client threads don't linger. In fact, if the constructor fails it is not possible to call close against the RestClient. 
--- .../java/org/elasticsearch/client/RestClientBuilder.java | 6 +++++- .../org/elasticsearch/client/RestClientBuilderTests.java | 6 +++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java b/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java index d3ae2e4c2a2..4d5b72eba49 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java @@ -59,6 +59,9 @@ public final class RestClientBuilder { if (hosts == null || hosts.length == 0) { throw new IllegalArgumentException("no hosts provided"); } + for (HttpHost host : hosts) { + Objects.requireNonNull(host, "host cannot be null"); + } this.hosts = hosts; } @@ -123,8 +126,9 @@ public final class RestClientBuilder { failureListener = new RestClient.FailureListener(); } CloseableHttpAsyncClient httpClient = createHttpClient(); + RestClient restClient = new RestClient(httpClient, maxRetryTimeout, defaultHeaders, hosts, failureListener); httpClient.start(); - return new RestClient(httpClient, maxRetryTimeout, defaultHeaders, hosts, failureListener); + return restClient; } private CloseableHttpAsyncClient createHttpClient() { diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderTests.java index da8e93a59f5..ca671862124 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderTests.java @@ -50,12 +50,16 @@ public class RestClientBuilderTests extends RestClientTestCase { } try { - RestClient.builder(new HttpHost[]{new HttpHost("localhost", 9200), null}).build(); + RestClient.builder(new HttpHost("localhost", 9200), null); fail("should have failed"); } catch(NullPointerException e) { assertEquals("host 
cannot be null", e.getMessage()); } + try (RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)).build()) { + assertNotNull(restClient); + } + try { RestClient.builder(new HttpHost("localhost", 9200)) .setMaxRetryTimeoutMillis(RandomInts.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0)); From 54fa997545f29a3e887cef6fa5a4d83eb57aaf09 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 12 Jul 2016 20:41:33 +0200 Subject: [PATCH 09/93] Reindex from remote: remove async client in favour of using RestClient performRequest async method --- .../index/reindex/TransportReindexAction.java | 11 +- .../remote/RemoteScrollableHitSource.java | 107 ++++-------------- .../RemoteScrollableHitSourceTests.java | 94 +++++++++------ 3 files changed, 89 insertions(+), 123 deletions(-) diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index 3e6f806d293..2238bec433b 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -48,7 +48,6 @@ import org.elasticsearch.index.mapper.internal.TTLFieldMapper; import org.elasticsearch.index.mapper.internal.VersionFieldMapper; import org.elasticsearch.index.reindex.remote.RemoteInfo; import org.elasticsearch.index.reindex.remote.RemoteScrollableHitSource; -import org.elasticsearch.node.service.NodeService; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.tasks.Task; @@ -185,12 +184,10 @@ public class TransportReindexAction extends HandledTransportAction fail, AsyncClient client, BytesReference query, SearchRequest searchRequest) { + Consumer fail, RestClient client, BytesReference query, SearchRequest searchRequest) { super(logger, backoffPolicy, threadPool, countSearchRetry, fail); 
this.query = query; this.searchRequest = searchRequest; @@ -99,7 +97,7 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { } - void onStartResponse(Consumer onResponse, Response response) { + private void onStartResponse(Consumer onResponse, Response response) { if (Strings.hasLength(response.getScrollId()) && response.getHits().isEmpty()) { logger.debug("First response looks like a scan response. Jumping right to the second. scroll=[{}]", response.getScrollId()); doStartNextScroll(response.getScrollId(), timeValueMillis(0), onResponse); @@ -119,15 +117,10 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { // Need to throw out response.... client.performRequest("DELETE", scrollPath(), emptyMap(), scrollEntity(scrollId), new ResponseListener() { @Override - public void onResponse(InputStream response) { + public void onSuccess(org.elasticsearch.client.Response response) { logger.debug("Successfully cleared [{}]", scrollId); } - @Override - public void onRetryableFailure(Exception t) { - onFailure(t); - } - @Override public void onFailure(Exception t) { logger.warn("Failed to clear scroll [{}]", t, scrollId); @@ -135,7 +128,7 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { }); } - void execute(String method, String uri, Map params, HttpEntity entity, + private void execute(String method, String uri, Map params, HttpEntity entity, BiFunction parser, Consumer listener) { class RetryHelper extends AbstractRunnable { private final Iterator retries = backoffPolicy.iterator(); @@ -144,34 +137,35 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { protected void doRun() throws Exception { client.performRequest(method, uri, params, entity, new ResponseListener() { @Override - public void onResponse(InputStream content) { - T response; + public void onSuccess(org.elasticsearch.client.Response response) { + T parsedResponse; try { + InputStream content = response.getEntity().getContent(); 
XContent xContent = XContentFactory.xContentType(content).xContent(); try(XContentParser xContentParser = xContent.createParser(content)) { - response = parser.apply(xContentParser, () -> ParseFieldMatcher.STRICT); + parsedResponse = parser.apply(xContentParser, () -> ParseFieldMatcher.STRICT); } } catch (IOException e) { throw new ElasticsearchException("Error deserializing response", e); } - listener.accept(response); + listener.accept(parsedResponse); } @Override public void onFailure(Exception e) { - fail.accept(e); - } - - @Override - public void onRetryableFailure(Exception t) { - if (retries.hasNext()) { - TimeValue delay = retries.next(); - logger.trace("retrying rejected search after [{}]", t, delay); - countSearchRetry.run(); - threadPool.schedule(delay, ThreadPool.Names.SAME, RetryHelper.this); - } else { - fail.accept(t); + if (e instanceof ResponseException) { + ResponseException re = (ResponseException) e; + if (RestStatus.TOO_MANY_REQUESTS.getStatus() == re.getResponse().getStatusLine().getStatusCode()) { + if (retries.hasNext()) { + TimeValue delay = retries.next(); + logger.trace("retrying rejected search after [{}]", e, delay); + countSearchRetry.run(); + threadPool.schedule(delay, ThreadPool.Names.SAME, RetryHelper.this); + return; + } + } } + fail.accept(e); } }); } @@ -183,59 +177,4 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { } new RetryHelper().run(); } - - public interface AsyncClient extends Closeable { - void performRequest(String method, String uri, Map params, HttpEntity entity, ResponseListener listener); - } - - public interface ResponseListener extends ActionListener { - void onRetryableFailure(Exception t); - } - - public static class AsynchronizingRestClient implements AsyncClient { - private final ThreadPool threadPool; - private final RestClient restClient; - - public AsynchronizingRestClient(ThreadPool threadPool, RestClient restClient) { - this.threadPool = threadPool; - this.restClient = restClient; - 
} - - @Override - public void performRequest(String method, String uri, Map params, HttpEntity entity, - ResponseListener listener) { - /* - * We use the generic thread pool here because this client is blocking the generic thread pool is sized appropriately for some - * of the threads on it to be blocked, waiting on IO. It'd be a disaster if this ran on the listener thread pool, eating - * valuable threads needed to handle responses. Most other thread pool would probably not mind running this either, but the - * generic thread pool is the "most right" place for it to run. We could make our own thread pool for this but the generic - * thread pool already has plenty of capacity. - */ - threadPool.generic().execute(new AbstractRunnable() { - @Override - protected void doRun() throws Exception { - org.elasticsearch.client.Response response = restClient.performRequest(method, uri, params, entity); - InputStream markSupportedInputStream = new BufferedInputStream(response.getEntity().getContent()); - listener.onResponse(markSupportedInputStream); - } - - @Override - public void onFailure(Exception t) { - if (t instanceof ResponseException) { - ResponseException re = (ResponseException) t; - if (RestStatus.TOO_MANY_REQUESTS.getStatus() == re.getResponse().getStatusLine().getStatusCode()) { - listener.onRetryableFailure(t); - return; - } - } - listener.onFailure(t); - } - }); - } - - @Override - public void close() throws IOException { - restClient.close(); - } - } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java index f8f3e82b4bb..b9f359c321c 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -19,31 +19,44 @@ package 
org.elasticsearch.index.reindex.remote; -import org.apache.http.HttpEntity; +import org.apache.http.HttpEntityEnclosingRequest; +import org.apache.http.HttpHost; +import org.apache.http.HttpResponse; +import org.apache.http.ProtocolVersion; +import org.apache.http.StatusLine; +import org.apache.http.concurrent.FutureCallback; +import org.apache.http.entity.InputStreamEntity; +import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; +import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; +import org.apache.http.message.BasicHttpResponse; +import org.apache.http.message.BasicStatusLine; +import org.apache.http.nio.protocol.HttpAsyncRequestProducer; +import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; import org.elasticsearch.Version; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.client.RestClient; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.index.reindex.ScrollableHitSource.Response; -import org.elasticsearch.index.reindex.remote.RemoteScrollableHitSource.ResponseListener; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.Before; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; -import java.io.IOException; -import java.io.InputStream; import java.io.InputStreamReader; import java.net.URL; import java.nio.charset.StandardCharsets; -import java.util.Map; import java.util.concurrent.Executor; +import java.util.concurrent.Future; import 
java.util.concurrent.ScheduledFuture; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; @@ -53,6 +66,9 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class RemoteScrollableHitSourceTests extends ESTestCase { private final String FAKE_SCROLL_ID = "DnF1ZXJ5VGhlbkZldGNoBQAAAfakescroll"; @@ -68,7 +84,7 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { threadPool = new TestThreadPool(getTestName()) { @Override public Executor executor(String name) { - return r -> r.run(); + return Runnable::run; } @Override @@ -307,6 +323,7 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { * Creates a hit source that doesn't make the remote request and instead returns data from some files. Also requests are always returned * synchronously rather than asynchronously. */ + @SuppressWarnings("unchecked") private RemoteScrollableHitSource sourceWithMockedRemoteCall(boolean mockRemoteVersion, String... 
paths) throws Exception { URL[] resources = new URL[paths.length]; for (int i = 0; i < paths.length; i++) { @@ -315,35 +332,48 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { throw new IllegalArgumentException("Couldn't find [" + paths[i] + "]"); } } - RemoteScrollableHitSource.AsyncClient client = new RemoteScrollableHitSource.AsyncClient() { + + CloseableHttpAsyncClient httpClient = mock(CloseableHttpAsyncClient.class); + when(httpClient.execute(any(HttpAsyncRequestProducer.class), any(HttpAsyncResponseConsumer.class), + any(FutureCallback.class))).thenAnswer(new Answer>() { + int responseCount = 0; - @Override - public void performRequest(String method, String uri, Map params, HttpEntity entity, - ResponseListener listener) { - try { - URL resource = resources[responseCount]; - String path = paths[responseCount++]; - InputStream stream = resource.openStream(); - if (path.startsWith("fail:")) { - String body = Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)); - if (path.equals("fail:rejection.json")) { - listener.onRetryableFailure(new RuntimeException(body)); - } else { - listener.onFailure(new RuntimeException(body)); - } - } else { - listener.onResponse(stream); - } - } catch (IOException e) { - listener.onFailure(e); - } - } @Override - public void close() throws IOException { + public Future answer(InvocationOnMock invocationOnMock) throws Throwable { + HttpAsyncRequestProducer requestProducer = (HttpAsyncRequestProducer) invocationOnMock.getArguments()[0]; + @SuppressWarnings("unchecked") + FutureCallback futureCallback = (FutureCallback) invocationOnMock.getArguments()[2]; + HttpEntityEnclosingRequest request = (HttpEntityEnclosingRequest)requestProducer.generateRequest(); + URL resource = resources[responseCount]; + String path = paths[responseCount++]; + ProtocolVersion protocolVersion = new ProtocolVersion("http", 1, 1); + if (path.startsWith("fail:")) { + String body = Streams.copyToString(new 
InputStreamReader(request.getEntity().getContent(), StandardCharsets.UTF_8)); + if (path.equals("fail:rejection.json")) { + StatusLine statusLine = new BasicStatusLine(protocolVersion, RestStatus.TOO_MANY_REQUESTS.getStatus(), ""); + BasicHttpResponse httpResponse = new BasicHttpResponse(statusLine); + futureCallback.completed(httpResponse); + } else { + futureCallback.failed(new RuntimeException(body)); + } + } else { + StatusLine statusLine = new BasicStatusLine(protocolVersion, 200, ""); + HttpResponse httpResponse = new BasicHttpResponse(statusLine); + httpResponse.setEntity(new InputStreamEntity(resource.openStream())); + futureCallback.completed(httpResponse); + } + return null; } - }; - TestRemoteScrollableHitSource hitSource = new TestRemoteScrollableHitSource(client) { + }); + + HttpAsyncClientBuilder clientBuilder = mock(HttpAsyncClientBuilder.class); + when(clientBuilder.build()).thenReturn(httpClient); + + RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)) + .setHttpClientConfigCallback(httpClientBuilder -> clientBuilder).build(); + + TestRemoteScrollableHitSource hitSource = new TestRemoteScrollableHitSource(restClient) { @Override void lookupRemoteVersion(Consumer onVersion) { if (mockRemoteVersion) { @@ -372,7 +402,7 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { } private class TestRemoteScrollableHitSource extends RemoteScrollableHitSource { - public TestRemoteScrollableHitSource(RemoteScrollableHitSource.AsyncClient client) { + TestRemoteScrollableHitSource(RestClient client) { super(RemoteScrollableHitSourceTests.this.logger, backoff(), RemoteScrollableHitSourceTests.this.threadPool, RemoteScrollableHitSourceTests.this::countRetry, RemoteScrollableHitSourceTests.this::failRequest, client, new BytesArray("{}"), RemoteScrollableHitSourceTests.this.searchRequest); From bce54cf38dae27d60b2656c14605445ac980fa4d Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 12 Jul 2016 21:35:45 +0200 Subject: [PATCH 
10/93] reindex from remote to read content-type header rather than guessing content type based on content --- .../index/reindex/remote/RemoteScrollableHitSource.java | 8 ++++---- .../reindex/remote/RemoteScrollableHitSourceTests.java | 3 ++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java index 8534d9002aa..c96b3efca8a 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java @@ -34,9 +34,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; -import org.elasticsearch.common.xcontent.XContent; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.reindex.ScrollableHitSource; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; @@ -141,8 +140,9 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { T parsedResponse; try { InputStream content = response.getEntity().getContent(); - XContent xContent = XContentFactory.xContentType(content).xContent(); - try(XContentParser xContentParser = xContent.createParser(content)) { + XContentType xContentType = XContentType.fromMediaTypeOrFormat( + response.getEntity().getContentType().getValue()); + try(XContentParser xContentParser = xContentType.xContent().createParser(content)) { parsedResponse = parser.apply(xContentParser, () -> ParseFieldMatcher.STRICT); } } catch (IOException e) { diff 
--git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java index b9f359c321c..c9dd25ec130 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -25,6 +25,7 @@ import org.apache.http.HttpResponse; import org.apache.http.ProtocolVersion; import org.apache.http.StatusLine; import org.apache.http.concurrent.FutureCallback; +import org.apache.http.entity.ContentType; import org.apache.http.entity.InputStreamEntity; import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; @@ -360,7 +361,7 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { } else { StatusLine statusLine = new BasicStatusLine(protocolVersion, 200, ""); HttpResponse httpResponse = new BasicHttpResponse(statusLine); - httpResponse.setEntity(new InputStreamEntity(resource.openStream())); + httpResponse.setEntity(new InputStreamEntity(resource.openStream(), ContentType.APPLICATION_JSON)); futureCallback.completed(httpResponse); } return null; From e742d65e0211377edfe04f2d0c14c86b31248378 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 13 Jul 2016 12:04:23 +0200 Subject: [PATCH 11/93] [TEST] Make sure the last response body is always available in our REST tests With the introduction of the async client, ResponseException doesn't eagerly read the response body anymore into a string. That is better, but raised a problem in our REST tests infra: we were reading the response body twice, while it can only be consumed once. Introduced a RestTestResponseException that wraps a ResponseException and exposes the body which now gets read only once. 
--- .../test/rest/ESRestTestCase.java | 6 +- .../test/rest/RestTestExecutionContext.java | 14 ++--- .../test/rest/client/RestTestClient.java | 13 +++-- .../test/rest/client/RestTestResponse.java | 2 +- .../client/RestTestResponseException.java | 55 +++++++++++++++++++ .../test/rest/section/DoSection.java | 9 ++- 6 files changed, 78 insertions(+), 21 deletions(-) create mode 100644 test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponseException.java diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 2df619a3a11..cbea7edd5af 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -22,13 +22,13 @@ package org.elasticsearch.test.rest; import com.carrotsearch.randomizedtesting.RandomizedTest; import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; -import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.client.RestTestResponse; +import org.elasticsearch.test.rest.client.RestTestResponseException; import org.elasticsearch.test.rest.parser.RestTestParseException; import org.elasticsearch.test.rest.parser.RestTestSuiteParser; import org.elasticsearch.test.rest.section.DoSection; @@ -276,9 +276,9 @@ public abstract class ESRestTestCase extends ESTestCase { deleteIndicesArgs.put("index", "*"); try { adminExecutionContext.callApi("indices.delete", deleteIndicesArgs, Collections.emptyList(), Collections.emptyMap()); - } catch (ResponseException e) { + } catch 
(RestTestResponseException e) { // 404 here just means we had no indexes - if (e.getResponse().getStatusLine().getStatusCode() != 404) { + if (e.getResponseException().getResponse().getStatusLine().getStatusCode() != 404) { throw e; } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java index cde95ff9812..3be5a43e151 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java @@ -19,13 +19,13 @@ package org.elasticsearch.test.rest; import org.elasticsearch.Version; -import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.test.rest.client.RestTestClient; import org.elasticsearch.test.rest.client.RestTestResponse; +import org.elasticsearch.test.rest.client.RestTestResponseException; import org.elasticsearch.test.rest.spec.RestSpec; import java.io.Closeable; @@ -62,7 +62,7 @@ public class RestTestExecutionContext implements Closeable { * Saves the obtained response in the execution context. 
*/ public RestTestResponse callApi(String apiName, Map params, List> bodies, - Map headers) throws Exception { + Map headers) throws Exception { //makes a copy of the parameters before modifying them for this specific request HashMap requestParams = new HashMap<>(params); for (Map.Entry entry : requestParams.entrySet()) { @@ -72,15 +72,15 @@ public class RestTestExecutionContext implements Closeable { } String body = actualBody(bodies); - try { response = callApiInternal(apiName, requestParams, body, headers); + return response; + } catch(RestTestResponseException e) { + response = e.getRestTestResponse(); + throw e; + } finally { //we always stash the last response body stash.stashValue("body", response.getBody()); - return response; - } catch(ResponseException e) { - response = new RestTestResponse(e.getResponse()); - throw e; } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java index aec404f62e7..1ba5960bf9a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java @@ -138,8 +138,12 @@ public class RestTestClient implements Closeable { entity = new StringEntity(body, RestClient.JSON_CONTENT_TYPE); } // And everything else is a url parameter! 
- Response response = restClient.performRequest(method, path, queryStringParams, entity); - return new RestTestResponse(response); + try { + Response response = restClient.performRequest(method, path, queryStringParams, entity); + return new RestTestResponse(response); + } catch(ResponseException e) { + throw new RestTestResponseException(e); + } } List ignores = new ArrayList<>(); @@ -242,14 +246,13 @@ public class RestTestClient implements Closeable { logger.debug("calling api [{}]", apiName); try { - Response response = restClient.performRequest(requestMethod, requestPath, - queryStringParams, requestBody, requestHeaders); + Response response = restClient.performRequest(requestMethod, requestPath, queryStringParams, requestBody, requestHeaders); return new RestTestResponse(response); } catch(ResponseException e) { if (ignores.contains(e.getResponse().getStatusLine().getStatusCode())) { return new RestTestResponse(e.getResponse()); } - throw e; + throw new RestTestResponseException(e); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java index 27e1abdbdba..9149824c33c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java @@ -38,7 +38,7 @@ public class RestTestResponse { private final String body; private ObjectPath parsedResponse; - public RestTestResponse(Response response) throws IOException { + RestTestResponse(Response response) throws IOException { this.response = response; if (response.getEntity() != null) { try { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponseException.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponseException.java new file mode 100644 index 00000000000..0a1a086b4e5 --- /dev/null +++ 
b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponseException.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.test.rest.client; + +import org.elasticsearch.client.ResponseException; + +import java.io.IOException; + +/** + * Exception obtained from a REST call in case the response code indicated an error. Eagerly reads the response body into a string + * for later optional parsing. Supports parsing the response body when needed and returning specific values extracted from it. + */ +public class RestTestResponseException extends Exception { + + private final RestTestResponse restTestResponse; + private final ResponseException responseException; + + RestTestResponseException(ResponseException responseException) throws IOException { + super(responseException); + this.responseException = responseException; + this.restTestResponse = new RestTestResponse(responseException.getResponse()); + } + + /** + * Exposes the obtained response body + */ + public RestTestResponse getRestTestResponse() { + return restTestResponse; + } + + /** + * Exposes the original {@link ResponseException}.
Note that the entity will always be null as it + * gets eagerly consumed and exposed through {@link #getRestTestResponse()}. + */ + public ResponseException getResponseException() { + return responseException; + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java index 8b242c53986..1f60db33378 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java @@ -18,15 +18,14 @@ */ package org.elasticsearch.test.rest.section; -import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.rest.RestTestExecutionContext; import org.elasticsearch.test.rest.client.RestTestResponse; +import org.elasticsearch.test.rest.client.RestTestResponseException; -import java.io.IOException; import java.util.HashMap; import java.util.Map; @@ -102,8 +101,8 @@ public class DoSection implements ExecutableSection { } fail(formatStatusCodeMessage(restTestResponse, catchStatusCode)); } - } catch(ResponseException e) { - RestTestResponse restTestResponse = new RestTestResponse(e.getResponse()); + } catch(RestTestResponseException e) { + RestTestResponse restTestResponse = e.getRestTestResponse(); if (!Strings.hasLength(catchParam)) { fail(formatStatusCodeMessage(restTestResponse, "2xx")); } else if (catches.containsKey(catchParam)) { @@ -111,7 +110,7 @@ public class DoSection implements ExecutableSection { } else if (catchParam.length() > 2 && catchParam.startsWith("/") && catchParam.endsWith("/")) { //the text of the error message matches regular expression assertThat(formatStatusCodeMessage(restTestResponse, "4xx|5xx"), - 
e.getResponse().getStatusLine().getStatusCode(), greaterThanOrEqualTo(400)); + e.getResponseException().getResponse().getStatusLine().getStatusCode(), greaterThanOrEqualTo(400)); Object error = executionContext.response("error"); assertThat("error was expected in the response", error, notNullValue()); //remove delimiters from regex From b6b92c64c09e8bda26067c419e17c49a78def377 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 13 Jul 2016 14:56:53 +0200 Subject: [PATCH 12/93] update Response javadocs --- .../rest/src/main/java/org/elasticsearch/client/Response.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/client/rest/src/main/java/org/elasticsearch/client/Response.java b/client/rest/src/main/java/org/elasticsearch/client/Response.java index be2a16f912d..91ca0a6c935 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/Response.java +++ b/client/rest/src/main/java/org/elasticsearch/client/Response.java @@ -25,14 +25,12 @@ import org.apache.http.HttpHost; import org.apache.http.HttpResponse; import org.apache.http.RequestLine; import org.apache.http.StatusLine; -import org.apache.http.client.methods.CloseableHttpResponse; import java.util.Objects; /** - * Holds an elasticsearch response. It wraps the {@link CloseableHttpResponse} response and associates it with + * Holds an elasticsearch response. It wraps the {@link HttpResponse} returned and associates it with * its corresponding {@link RequestLine} and {@link HttpHost}. - * It must be closed to free any resource held by it, as well as the corresponding connection in the connection pool. 
*/ public class Response { From a3f9721751b32797f7a85b4129fcc7ab20108074 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 13 Jul 2016 14:57:57 +0200 Subject: [PATCH 13/93] replace till with until in RestClient javadocs --- .../java/org/elasticsearch/client/RestClient.java | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index a511ddbaa20..e2accdfaf82 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -72,7 +72,7 @@ import java.util.concurrent.atomic.AtomicInteger; * sending a request, a host gets selected out of the provided ones in a round-robin fashion. Failing hosts are marked dead and * retried after a certain amount of time (minimum 1 minute, maximum 30 minutes), depending on how many times they previously * failed (the more failures, the later they will be retried). In case of failures all of the alive nodes (or dead nodes that - * deserve a retry) are retried till one responds or none of them does, in which case an {@link IOException} will be thrown. + * deserve a retry) are retried until one responds or none of them does, in which case an {@link IOException} will be thrown. * * Requests can be traced by enabling trace logging for "tracer". The trace logger outputs requests and responses in curl format. */ @@ -150,11 +150,11 @@ public final class RestClient implements Closeable { } /** - * Sends a request to the elasticsearch cluster that the current client points to. Blocks till the request is completed and returns + * Sends a request to the elasticsearch cluster that the current client points to. Blocks until the request is completed and returns * its response of fails by throwing an exception. Selects a host out of the provided ones in a round-robin fashion. 
Failing hosts * are marked dead and retried after a certain amount of time (minimum 1 minute, maximum 30 minutes), depending on how many times * they previously failed (the more failures, the later they will be retried). In case of failures all of the alive nodes (or dead - * nodes that deserve a retry) are retried till one responds or none of them does, in which case an {@link IOException} will be thrown. + * nodes that deserve a retry) are retried until one responds or none of them does, in which case an {@link IOException} will be thrown. * * @param method the http method * @param endpoint the path of the request (without host and port) @@ -207,7 +207,6 @@ public final class RestClient implements Closeable { } /** - /** * Sends a request to the elasticsearch cluster that the current client points to. * Shortcut to {@link #performRequest(String, String, Map, HttpEntity, HttpAsyncResponseConsumer, ResponseListener, Header...)} * but without an async response consumer, meaning that a {@link org.apache.http.nio.protocol.BasicAsyncResponseConsumer} @@ -232,7 +231,7 @@ public final class RestClient implements Closeable { * Selects a host out of the provided ones in a round-robin fashion. Failing hosts are marked dead and retried after a certain * amount of time (minimum 1 minute, maximum 30 minutes), depending on how many times they previously failed (the more failures, * the later they will be retried). In case of failures all of the alive nodes (or dead nodes that deserve a retry) are retried - * till one responds or none of them does, in which case an {@link IOException} will be thrown. + * until one responds or none of them does, in which case an {@link IOException} will be thrown. * * @param method the http method * @param endpoint the path of the request (without host and port) @@ -336,7 +335,7 @@ public final class RestClient implements Closeable { /** * Returns an iterator of hosts to be used for a request call. 
* Ideally, the first host is retrieved from the iterator and used successfully for the request. - * Otherwise, after each failure the next host should be retrieved from the iterator so that the request can be retried till + * Otherwise, after each failure the next host should be retrieved from the iterator so that the request can be retried until * the iterator is exhausted. The maximum total of attempts is equal to the number of hosts that are available in the iterator. * The iterator returned will never be empty, rather an {@link IllegalStateException} in case there are no hosts. * In case there are no healthy hosts available, or dead ones to be be retried, one dead host gets returned. From 06caea6b80fc2496b92e728574e647b6135c2641 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 13 Jul 2016 15:13:29 +0200 Subject: [PATCH 14/93] move RestClient#builder method on top for more visibility --- .../java/org/elasticsearch/client/RestClient.java | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index e2accdfaf82..56baa35598f 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -99,6 +99,13 @@ public final class RestClient implements Closeable { setHosts(hosts); } + /** + * Returns a new {@link RestClientBuilder} to help with {@link RestClient} creation. + */ + public static RestClientBuilder builder(HttpHost... hosts) { + return new RestClientBuilder(hosts); + } + /** * Replaces the hosts that the client communicates with. * @see HttpHost @@ -522,13 +529,6 @@ public final class RestClient implements Closeable { } } - /** - * Returns a new {@link RestClientBuilder} to help with {@link RestClient} creation. - */ - public static RestClientBuilder builder(HttpHost... 
hosts) { - return new RestClientBuilder(hosts); - } - /** * Listener that allows to be notified whenever a failure happens. Useful when sniffing is enabled, so that we can sniff on failure. * The default implementation is a no-op. From 69309fb834aa3a6bf0c107f7a38dedf284360890 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 13 Jul 2016 15:20:53 +0200 Subject: [PATCH 15/93] [TEST] remove one too many SuppressWarnings --- .../org/elasticsearch/client/RestClientMultipleHostsTests.java | 1 - .../java/org/elasticsearch/client/RestClientSingleHostTests.java | 1 - 2 files changed, 2 deletions(-) diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java index 89eacd80fa9..5fdf331a01e 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java @@ -79,7 +79,6 @@ public class RestClientMultipleHostsTests extends RestClientTestCase { HttpAsyncRequestProducer requestProducer = (HttpAsyncRequestProducer) invocationOnMock.getArguments()[0]; HttpUriRequest request = (HttpUriRequest)requestProducer.generateRequest(); HttpHost httpHost = requestProducer.getTarget(); - @SuppressWarnings("unchecked") FutureCallback futureCallback = (FutureCallback) invocationOnMock.getArguments()[2]; //return the desired status code or exception depending on the path if (request.getURI().getPath().equals("/soe")) { diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java index f315592cdff..62f8eb548f6 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java @@ -102,7 +102,6 @@ public class 
RestClientSingleHostTests extends RestClientTestCase { @Override public Future answer(InvocationOnMock invocationOnMock) throws Throwable { HttpAsyncRequestProducer requestProducer = (HttpAsyncRequestProducer) invocationOnMock.getArguments()[0]; - @SuppressWarnings("unchecked") FutureCallback futureCallback = (FutureCallback) invocationOnMock.getArguments()[2]; HttpUriRequest request = (HttpUriRequest)requestProducer.generateRequest(); //return the desired status code or exception depending on the path From f2ab597c84ab6118bcf2c5902898556a0e6d40a5 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 13 Jul 2016 15:37:50 +0200 Subject: [PATCH 16/93] Build: use license mapping for http* and commons-* This way we reduce the number of LICENSE and NOTICE files with same content for client.rest and client.sniffer projects. --- client/rest/build.gradle | 5 + ...-codec-LICENSE.txt => commons-LICENSE.txt} | 0 ...ns-codec-NOTICE.txt => commons-NOTICE.txt} | 0 .../rest/licenses/commons-logging-NOTICE.txt | 6 - .../rest/licenses/httpasyncclient-LICENSE.txt | 558 ------------------ .../rest/licenses/httpasyncclient-NOTICE.txt | 6 - client/rest/licenses/httpcore-LICENSE.txt | 558 ------------------ client/rest/licenses/httpcore-NOTICE.txt | 6 - client/rest/licenses/httpcore-nio-LICENSE.txt | 558 ------------------ client/rest/licenses/httpcore-nio-NOTICE.txt | 6 - client/sniffer/build.gradle | 5 + .../licenses/commons-LICENSE.txt} | 0 ...ns-codec-NOTICE.txt => commons-NOTICE.txt} | 0 .../licenses/commons-codec-LICENSE.txt | 202 ------- .../licenses/commons-logging-LICENSE.txt | 202 ------- .../licenses/commons-logging-NOTICE.txt | 6 - client/sniffer/licenses/httpcore-LICENSE.txt | 558 ------------------ client/sniffer/licenses/httpcore-NOTICE.txt | 6 - 18 files changed, 10 insertions(+), 2672 deletions(-) rename client/rest/licenses/{commons-codec-LICENSE.txt => commons-LICENSE.txt} (100%) rename client/rest/licenses/{commons-codec-NOTICE.txt => commons-NOTICE.txt} (100%) delete 
mode 100644 client/rest/licenses/commons-logging-NOTICE.txt delete mode 100644 client/rest/licenses/httpasyncclient-LICENSE.txt delete mode 100644 client/rest/licenses/httpasyncclient-NOTICE.txt delete mode 100644 client/rest/licenses/httpcore-LICENSE.txt delete mode 100644 client/rest/licenses/httpcore-NOTICE.txt delete mode 100644 client/rest/licenses/httpcore-nio-LICENSE.txt delete mode 100644 client/rest/licenses/httpcore-nio-NOTICE.txt rename client/{rest/licenses/commons-logging-LICENSE.txt => sniffer/licenses/commons-LICENSE.txt} (100%) rename client/sniffer/licenses/{commons-codec-NOTICE.txt => commons-NOTICE.txt} (100%) delete mode 100644 client/sniffer/licenses/commons-codec-LICENSE.txt delete mode 100644 client/sniffer/licenses/commons-logging-LICENSE.txt delete mode 100644 client/sniffer/licenses/commons-logging-NOTICE.txt delete mode 100644 client/sniffer/licenses/httpcore-LICENSE.txt delete mode 100644 client/sniffer/licenses/httpcore-NOTICE.txt diff --git a/client/rest/build.gradle b/client/rest/build.gradle index 4673424241f..fabe5c65373 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -58,6 +58,11 @@ forbiddenApisTest { signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] } +dependencyLicenses { + mapping from: /http.*/, to: 'httpclient' + mapping from: /commons-.*/, to: 'commons' +} + //JarHell is part of es core, which we don't want to pull in jarHell.enabled=false diff --git a/client/rest/licenses/commons-codec-LICENSE.txt b/client/rest/licenses/commons-LICENSE.txt similarity index 100% rename from client/rest/licenses/commons-codec-LICENSE.txt rename to client/rest/licenses/commons-LICENSE.txt diff --git a/client/rest/licenses/commons-codec-NOTICE.txt b/client/rest/licenses/commons-NOTICE.txt similarity index 100% rename from client/rest/licenses/commons-codec-NOTICE.txt rename to client/rest/licenses/commons-NOTICE.txt diff --git a/client/rest/licenses/commons-logging-NOTICE.txt 
b/client/rest/licenses/commons-logging-NOTICE.txt deleted file mode 100644 index 556bd03951d..00000000000 --- a/client/rest/licenses/commons-logging-NOTICE.txt +++ /dev/null @@ -1,6 +0,0 @@ -Apache Commons Logging -Copyright 2003-2014 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). - diff --git a/client/rest/licenses/httpasyncclient-LICENSE.txt b/client/rest/licenses/httpasyncclient-LICENSE.txt deleted file mode 100644 index 32f01eda18f..00000000000 --- a/client/rest/licenses/httpasyncclient-LICENSE.txt +++ /dev/null @@ -1,558 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - -========================================================================= - -This project includes Public Suffix List copied from - -licensed under the terms of the Mozilla Public License, v. 2.0 - -Full license text: - -Mozilla Public License Version 2.0 -================================== - -1. Definitions --------------- - -1.1. "Contributor" - means each individual or legal entity that creates, contributes to - the creation of, or owns Covered Software. - -1.2. 
"Contributor Version" - means the combination of the Contributions of others (if any) used - by a Contributor and that particular Contributor's Contribution. - -1.3. "Contribution" - means Covered Software of a particular Contributor. - -1.4. "Covered Software" - means Source Code Form to which the initial Contributor has attached - the notice in Exhibit A, the Executable Form of such Source Code - Form, and Modifications of such Source Code Form, in each case - including portions thereof. - -1.5. "Incompatible With Secondary Licenses" - means - - (a) that the initial Contributor has attached the notice described - in Exhibit B to the Covered Software; or - - (b) that the Covered Software was made available under the terms of - version 1.1 or earlier of the License, but not also under the - terms of a Secondary License. - -1.6. "Executable Form" - means any form of the work other than Source Code Form. - -1.7. "Larger Work" - means a work that combines Covered Software with other material, in - a separate file or files, that is not Covered Software. - -1.8. "License" - means this document. - -1.9. "Licensable" - means having the right to grant, to the maximum extent possible, - whether at the time of the initial grant or subsequently, any and - all of the rights conveyed by this License. - -1.10. "Modifications" - means any of the following: - - (a) any file in Source Code Form that results from an addition to, - deletion from, or modification of the contents of Covered - Software; or - - (b) any new file in Source Code Form that contains any Covered - Software. - -1.11. "Patent Claims" of a Contributor - means any patent claim(s), including without limitation, method, - process, and apparatus claims, in any patent Licensable by such - Contributor that would be infringed, but for the grant of the - License, by the making, using, selling, offering for sale, having - made, import, or transfer of either its Contributions or its - Contributor Version. - -1.12. 
"Secondary License" - means either the GNU General Public License, Version 2.0, the GNU - Lesser General Public License, Version 2.1, the GNU Affero General - Public License, Version 3.0, or any later versions of those - licenses. - -1.13. "Source Code Form" - means the form of the work preferred for making modifications. - -1.14. "You" (or "Your") - means an individual or a legal entity exercising rights under this - License. For legal entities, "You" includes any entity that - controls, is controlled by, or is under common control with You. For - purposes of this definition, "control" means (a) the power, direct - or indirect, to cause the direction or management of such entity, - whether by contract or otherwise, or (b) ownership of more than - fifty percent (50%) of the outstanding shares or beneficial - ownership of such entity. - -2. License Grants and Conditions --------------------------------- - -2.1. Grants - -Each Contributor hereby grants You a world-wide, royalty-free, -non-exclusive license: - -(a) under intellectual property rights (other than patent or trademark) - Licensable by such Contributor to use, reproduce, make available, - modify, display, perform, distribute, and otherwise exploit its - Contributions, either on an unmodified basis, with Modifications, or - as part of a Larger Work; and - -(b) under Patent Claims of such Contributor to make, use, sell, offer - for sale, have made, import, and otherwise transfer either its - Contributions or its Contributor Version. - -2.2. Effective Date - -The licenses granted in Section 2.1 with respect to any Contribution -become effective for each Contribution on the date the Contributor first -distributes such Contribution. - -2.3. Limitations on Grant Scope - -The licenses granted in this Section 2 are the only rights granted under -this License. No additional rights or licenses will be implied from the -distribution or licensing of Covered Software under this License. 
-Notwithstanding Section 2.1(b) above, no patent license is granted by a -Contributor: - -(a) for any code that a Contributor has removed from Covered Software; - or - -(b) for infringements caused by: (i) Your and any other third party's - modifications of Covered Software, or (ii) the combination of its - Contributions with other software (except as part of its Contributor - Version); or - -(c) under Patent Claims infringed by Covered Software in the absence of - its Contributions. - -This License does not grant any rights in the trademarks, service marks, -or logos of any Contributor (except as may be necessary to comply with -the notice requirements in Section 3.4). - -2.4. Subsequent Licenses - -No Contributor makes additional grants as a result of Your choice to -distribute the Covered Software under a subsequent version of this -License (see Section 10.2) or under the terms of a Secondary License (if -permitted under the terms of Section 3.3). - -2.5. Representation - -Each Contributor represents that the Contributor believes its -Contributions are its original creation(s) or it has sufficient rights -to grant the rights to its Contributions conveyed by this License. - -2.6. Fair Use - -This License is not intended to limit any rights You have under -applicable copyright doctrines of fair use, fair dealing, or other -equivalents. - -2.7. Conditions - -Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted -in Section 2.1. - -3. Responsibilities -------------------- - -3.1. Distribution of Source Form - -All distribution of Covered Software in Source Code Form, including any -Modifications that You create or to which You contribute, must be under -the terms of this License. You must inform recipients that the Source -Code Form of the Covered Software is governed by the terms of this -License, and how they can obtain a copy of this License. You may not -attempt to alter or restrict the recipients' rights in the Source Code -Form. - -3.2. 
Distribution of Executable Form - -If You distribute Covered Software in Executable Form then: - -(a) such Covered Software must also be made available in Source Code - Form, as described in Section 3.1, and You must inform recipients of - the Executable Form how they can obtain a copy of such Source Code - Form by reasonable means in a timely manner, at a charge no more - than the cost of distribution to the recipient; and - -(b) You may distribute such Executable Form under the terms of this - License, or sublicense it under different terms, provided that the - license for the Executable Form does not attempt to limit or alter - the recipients' rights in the Source Code Form under this License. - -3.3. Distribution of a Larger Work - -You may create and distribute a Larger Work under terms of Your choice, -provided that You also comply with the requirements of this License for -the Covered Software. If the Larger Work is a combination of Covered -Software with a work governed by one or more Secondary Licenses, and the -Covered Software is not Incompatible With Secondary Licenses, this -License permits You to additionally distribute such Covered Software -under the terms of such Secondary License(s), so that the recipient of -the Larger Work may, at their option, further distribute the Covered -Software under the terms of either this License or such Secondary -License(s). - -3.4. Notices - -You may not remove or alter the substance of any license notices -(including copyright notices, patent notices, disclaimers of warranty, -or limitations of liability) contained within the Source Code Form of -the Covered Software, except that You may alter any license notices to -the extent required to remedy known factual inaccuracies. - -3.5. Application of Additional Terms - -You may choose to offer, and to charge a fee for, warranty, support, -indemnity or liability obligations to one or more recipients of Covered -Software. 
However, You may do so only on Your own behalf, and not on -behalf of any Contributor. You must make it absolutely clear that any -such warranty, support, indemnity, or liability obligation is offered by -You alone, and You hereby agree to indemnify every Contributor for any -liability incurred by such Contributor as a result of warranty, support, -indemnity or liability terms You offer. You may include additional -disclaimers of warranty and limitations of liability specific to any -jurisdiction. - -4. Inability to Comply Due to Statute or Regulation ---------------------------------------------------- - -If it is impossible for You to comply with any of the terms of this -License with respect to some or all of the Covered Software due to -statute, judicial order, or regulation then You must: (a) comply with -the terms of this License to the maximum extent possible; and (b) -describe the limitations and the code they affect. Such description must -be placed in a text file included with all distributions of the Covered -Software under this License. Except to the extent prohibited by statute -or regulation, such description must be sufficiently detailed for a -recipient of ordinary skill to be able to understand it. - -5. Termination --------------- - -5.1. The rights granted under this License will terminate automatically -if You fail to comply with any of its terms. However, if You become -compliant, then the rights granted under this License from a particular -Contributor are reinstated (a) provisionally, unless and until such -Contributor explicitly and finally terminates Your grants, and (b) on an -ongoing basis, if such Contributor fails to notify You of the -non-compliance by some reasonable means prior to 60 days after You have -come back into compliance. 
Moreover, Your grants from a particular -Contributor are reinstated on an ongoing basis if such Contributor -notifies You of the non-compliance by some reasonable means, this is the -first time You have received notice of non-compliance with this License -from such Contributor, and You become compliant prior to 30 days after -Your receipt of the notice. - -5.2. If You initiate litigation against any entity by asserting a patent -infringement claim (excluding declaratory judgment actions, -counter-claims, and cross-claims) alleging that a Contributor Version -directly or indirectly infringes any patent, then the rights granted to -You by any and all Contributors for the Covered Software under Section -2.1 of this License shall terminate. - -5.3. In the event of termination under Sections 5.1 or 5.2 above, all -end user license agreements (excluding distributors and resellers) which -have been validly granted by You or Your distributors under this License -prior to termination shall survive termination. - -************************************************************************ -* * -* 6. Disclaimer of Warranty * -* ------------------------- * -* * -* Covered Software is provided under this License on an "as is" * -* basis, without warranty of any kind, either expressed, implied, or * -* statutory, including, without limitation, warranties that the * -* Covered Software is free of defects, merchantable, fit for a * -* particular purpose or non-infringing. The entire risk as to the * -* quality and performance of the Covered Software is with You. * -* Should any Covered Software prove defective in any respect, You * -* (not any Contributor) assume the cost of any necessary servicing, * -* repair, or correction. This disclaimer of warranty constitutes an * -* essential part of this License. No use of any Covered Software is * -* authorized under this License except under this disclaimer. 
* -* * -************************************************************************ - -************************************************************************ -* * -* 7. Limitation of Liability * -* -------------------------- * -* * -* Under no circumstances and under no legal theory, whether tort * -* (including negligence), contract, or otherwise, shall any * -* Contributor, or anyone who distributes Covered Software as * -* permitted above, be liable to You for any direct, indirect, * -* special, incidental, or consequential damages of any character * -* including, without limitation, damages for lost profits, loss of * -* goodwill, work stoppage, computer failure or malfunction, or any * -* and all other commercial damages or losses, even if such party * -* shall have been informed of the possibility of such damages. This * -* limitation of liability shall not apply to liability for death or * -* personal injury resulting from such party's negligence to the * -* extent applicable law prohibits such limitation. Some * -* jurisdictions do not allow the exclusion or limitation of * -* incidental or consequential damages, so this exclusion and * -* limitation may not apply to You. * -* * -************************************************************************ - -8. Litigation -------------- - -Any litigation relating to this License may be brought only in the -courts of a jurisdiction where the defendant maintains its principal -place of business and such litigation shall be governed by laws of that -jurisdiction, without reference to its conflict-of-law provisions. -Nothing in this Section shall prevent a party's ability to bring -cross-claims or counter-claims. - -9. Miscellaneous ----------------- - -This License represents the complete agreement concerning the subject -matter hereof. If any provision of this License is held to be -unenforceable, such provision shall be reformed only to the extent -necessary to make it enforceable. 
Any law or regulation which provides -that the language of a contract shall be construed against the drafter -shall not be used to construe this License against a Contributor. - -10. Versions of the License ---------------------------- - -10.1. New Versions - -Mozilla Foundation is the license steward. Except as provided in Section -10.3, no one other than the license steward has the right to modify or -publish new versions of this License. Each version will be given a -distinguishing version number. - -10.2. Effect of New Versions - -You may distribute the Covered Software under the terms of the version -of the License under which You originally received the Covered Software, -or under the terms of any subsequent version published by the license -steward. - -10.3. Modified Versions - -If you create software not governed by this License, and you want to -create a new license for such software, you may create and use a -modified version of this License if you rename the license and remove -any references to the name of the license steward (except to note that -such modified license differs from this License). - -10.4. Distributing Source Code Form that is Incompatible With Secondary -Licenses - -If You choose to distribute Source Code Form that is Incompatible With -Secondary Licenses under the terms of this version of the License, the -notice described in Exhibit B of this License must be attached. - -Exhibit A - Source Code Form License Notice -------------------------------------------- - - This Source Code Form is subject to the terms of the Mozilla Public - License, v. 2.0. If a copy of the MPL was not distributed with this - file, You can obtain one at http://mozilla.org/MPL/2.0/. - -If it is not possible or desirable to put the notice in a particular -file, then You may include the notice in a location (such as a LICENSE -file in a relevant directory) where a recipient would be likely to look -for such a notice. 
- -You may add additional accurate notices of copyright ownership. - -Exhibit B - "Incompatible With Secondary Licenses" Notice ---------------------------------------------------------- - - This Source Code Form is "Incompatible With Secondary Licenses", as - defined by the Mozilla Public License, v. 2.0. diff --git a/client/rest/licenses/httpasyncclient-NOTICE.txt b/client/rest/licenses/httpasyncclient-NOTICE.txt deleted file mode 100644 index 91e5c40c4c6..00000000000 --- a/client/rest/licenses/httpasyncclient-NOTICE.txt +++ /dev/null @@ -1,6 +0,0 @@ -Apache HttpComponents Client -Copyright 1999-2016 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). - diff --git a/client/rest/licenses/httpcore-LICENSE.txt b/client/rest/licenses/httpcore-LICENSE.txt deleted file mode 100644 index 32f01eda18f..00000000000 --- a/client/rest/licenses/httpcore-LICENSE.txt +++ /dev/null @@ -1,558 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - -========================================================================= - -This project includes Public Suffix List copied from - -licensed under the terms of the Mozilla Public License, v. 2.0 - -Full license text: - -Mozilla Public License Version 2.0 -================================== - -1. Definitions --------------- - -1.1. "Contributor" - means each individual or legal entity that creates, contributes to - the creation of, or owns Covered Software. - -1.2. 
"Contributor Version" - means the combination of the Contributions of others (if any) used - by a Contributor and that particular Contributor's Contribution. - -1.3. "Contribution" - means Covered Software of a particular Contributor. - -1.4. "Covered Software" - means Source Code Form to which the initial Contributor has attached - the notice in Exhibit A, the Executable Form of such Source Code - Form, and Modifications of such Source Code Form, in each case - including portions thereof. - -1.5. "Incompatible With Secondary Licenses" - means - - (a) that the initial Contributor has attached the notice described - in Exhibit B to the Covered Software; or - - (b) that the Covered Software was made available under the terms of - version 1.1 or earlier of the License, but not also under the - terms of a Secondary License. - -1.6. "Executable Form" - means any form of the work other than Source Code Form. - -1.7. "Larger Work" - means a work that combines Covered Software with other material, in - a separate file or files, that is not Covered Software. - -1.8. "License" - means this document. - -1.9. "Licensable" - means having the right to grant, to the maximum extent possible, - whether at the time of the initial grant or subsequently, any and - all of the rights conveyed by this License. - -1.10. "Modifications" - means any of the following: - - (a) any file in Source Code Form that results from an addition to, - deletion from, or modification of the contents of Covered - Software; or - - (b) any new file in Source Code Form that contains any Covered - Software. - -1.11. "Patent Claims" of a Contributor - means any patent claim(s), including without limitation, method, - process, and apparatus claims, in any patent Licensable by such - Contributor that would be infringed, but for the grant of the - License, by the making, using, selling, offering for sale, having - made, import, or transfer of either its Contributions or its - Contributor Version. - -1.12. 
"Secondary License" - means either the GNU General Public License, Version 2.0, the GNU - Lesser General Public License, Version 2.1, the GNU Affero General - Public License, Version 3.0, or any later versions of those - licenses. - -1.13. "Source Code Form" - means the form of the work preferred for making modifications. - -1.14. "You" (or "Your") - means an individual or a legal entity exercising rights under this - License. For legal entities, "You" includes any entity that - controls, is controlled by, or is under common control with You. For - purposes of this definition, "control" means (a) the power, direct - or indirect, to cause the direction or management of such entity, - whether by contract or otherwise, or (b) ownership of more than - fifty percent (50%) of the outstanding shares or beneficial - ownership of such entity. - -2. License Grants and Conditions --------------------------------- - -2.1. Grants - -Each Contributor hereby grants You a world-wide, royalty-free, -non-exclusive license: - -(a) under intellectual property rights (other than patent or trademark) - Licensable by such Contributor to use, reproduce, make available, - modify, display, perform, distribute, and otherwise exploit its - Contributions, either on an unmodified basis, with Modifications, or - as part of a Larger Work; and - -(b) under Patent Claims of such Contributor to make, use, sell, offer - for sale, have made, import, and otherwise transfer either its - Contributions or its Contributor Version. - -2.2. Effective Date - -The licenses granted in Section 2.1 with respect to any Contribution -become effective for each Contribution on the date the Contributor first -distributes such Contribution. - -2.3. Limitations on Grant Scope - -The licenses granted in this Section 2 are the only rights granted under -this License. No additional rights or licenses will be implied from the -distribution or licensing of Covered Software under this License. 
-Notwithstanding Section 2.1(b) above, no patent license is granted by a -Contributor: - -(a) for any code that a Contributor has removed from Covered Software; - or - -(b) for infringements caused by: (i) Your and any other third party's - modifications of Covered Software, or (ii) the combination of its - Contributions with other software (except as part of its Contributor - Version); or - -(c) under Patent Claims infringed by Covered Software in the absence of - its Contributions. - -This License does not grant any rights in the trademarks, service marks, -or logos of any Contributor (except as may be necessary to comply with -the notice requirements in Section 3.4). - -2.4. Subsequent Licenses - -No Contributor makes additional grants as a result of Your choice to -distribute the Covered Software under a subsequent version of this -License (see Section 10.2) or under the terms of a Secondary License (if -permitted under the terms of Section 3.3). - -2.5. Representation - -Each Contributor represents that the Contributor believes its -Contributions are its original creation(s) or it has sufficient rights -to grant the rights to its Contributions conveyed by this License. - -2.6. Fair Use - -This License is not intended to limit any rights You have under -applicable copyright doctrines of fair use, fair dealing, or other -equivalents. - -2.7. Conditions - -Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted -in Section 2.1. - -3. Responsibilities -------------------- - -3.1. Distribution of Source Form - -All distribution of Covered Software in Source Code Form, including any -Modifications that You create or to which You contribute, must be under -the terms of this License. You must inform recipients that the Source -Code Form of the Covered Software is governed by the terms of this -License, and how they can obtain a copy of this License. You may not -attempt to alter or restrict the recipients' rights in the Source Code -Form. - -3.2. 
Distribution of Executable Form - -If You distribute Covered Software in Executable Form then: - -(a) such Covered Software must also be made available in Source Code - Form, as described in Section 3.1, and You must inform recipients of - the Executable Form how they can obtain a copy of such Source Code - Form by reasonable means in a timely manner, at a charge no more - than the cost of distribution to the recipient; and - -(b) You may distribute such Executable Form under the terms of this - License, or sublicense it under different terms, provided that the - license for the Executable Form does not attempt to limit or alter - the recipients' rights in the Source Code Form under this License. - -3.3. Distribution of a Larger Work - -You may create and distribute a Larger Work under terms of Your choice, -provided that You also comply with the requirements of this License for -the Covered Software. If the Larger Work is a combination of Covered -Software with a work governed by one or more Secondary Licenses, and the -Covered Software is not Incompatible With Secondary Licenses, this -License permits You to additionally distribute such Covered Software -under the terms of such Secondary License(s), so that the recipient of -the Larger Work may, at their option, further distribute the Covered -Software under the terms of either this License or such Secondary -License(s). - -3.4. Notices - -You may not remove or alter the substance of any license notices -(including copyright notices, patent notices, disclaimers of warranty, -or limitations of liability) contained within the Source Code Form of -the Covered Software, except that You may alter any license notices to -the extent required to remedy known factual inaccuracies. - -3.5. Application of Additional Terms - -You may choose to offer, and to charge a fee for, warranty, support, -indemnity or liability obligations to one or more recipients of Covered -Software. 
However, You may do so only on Your own behalf, and not on -behalf of any Contributor. You must make it absolutely clear that any -such warranty, support, indemnity, or liability obligation is offered by -You alone, and You hereby agree to indemnify every Contributor for any -liability incurred by such Contributor as a result of warranty, support, -indemnity or liability terms You offer. You may include additional -disclaimers of warranty and limitations of liability specific to any -jurisdiction. - -4. Inability to Comply Due to Statute or Regulation ---------------------------------------------------- - -If it is impossible for You to comply with any of the terms of this -License with respect to some or all of the Covered Software due to -statute, judicial order, or regulation then You must: (a) comply with -the terms of this License to the maximum extent possible; and (b) -describe the limitations and the code they affect. Such description must -be placed in a text file included with all distributions of the Covered -Software under this License. Except to the extent prohibited by statute -or regulation, such description must be sufficiently detailed for a -recipient of ordinary skill to be able to understand it. - -5. Termination --------------- - -5.1. The rights granted under this License will terminate automatically -if You fail to comply with any of its terms. However, if You become -compliant, then the rights granted under this License from a particular -Contributor are reinstated (a) provisionally, unless and until such -Contributor explicitly and finally terminates Your grants, and (b) on an -ongoing basis, if such Contributor fails to notify You of the -non-compliance by some reasonable means prior to 60 days after You have -come back into compliance. 
Moreover, Your grants from a particular -Contributor are reinstated on an ongoing basis if such Contributor -notifies You of the non-compliance by some reasonable means, this is the -first time You have received notice of non-compliance with this License -from such Contributor, and You become compliant prior to 30 days after -Your receipt of the notice. - -5.2. If You initiate litigation against any entity by asserting a patent -infringement claim (excluding declaratory judgment actions, -counter-claims, and cross-claims) alleging that a Contributor Version -directly or indirectly infringes any patent, then the rights granted to -You by any and all Contributors for the Covered Software under Section -2.1 of this License shall terminate. - -5.3. In the event of termination under Sections 5.1 or 5.2 above, all -end user license agreements (excluding distributors and resellers) which -have been validly granted by You or Your distributors under this License -prior to termination shall survive termination. - -************************************************************************ -* * -* 6. Disclaimer of Warranty * -* ------------------------- * -* * -* Covered Software is provided under this License on an "as is" * -* basis, without warranty of any kind, either expressed, implied, or * -* statutory, including, without limitation, warranties that the * -* Covered Software is free of defects, merchantable, fit for a * -* particular purpose or non-infringing. The entire risk as to the * -* quality and performance of the Covered Software is with You. * -* Should any Covered Software prove defective in any respect, You * -* (not any Contributor) assume the cost of any necessary servicing, * -* repair, or correction. This disclaimer of warranty constitutes an * -* essential part of this License. No use of any Covered Software is * -* authorized under this License except under this disclaimer. 
* -* * -************************************************************************ - -************************************************************************ -* * -* 7. Limitation of Liability * -* -------------------------- * -* * -* Under no circumstances and under no legal theory, whether tort * -* (including negligence), contract, or otherwise, shall any * -* Contributor, or anyone who distributes Covered Software as * -* permitted above, be liable to You for any direct, indirect, * -* special, incidental, or consequential damages of any character * -* including, without limitation, damages for lost profits, loss of * -* goodwill, work stoppage, computer failure or malfunction, or any * -* and all other commercial damages or losses, even if such party * -* shall have been informed of the possibility of such damages. This * -* limitation of liability shall not apply to liability for death or * -* personal injury resulting from such party's negligence to the * -* extent applicable law prohibits such limitation. Some * -* jurisdictions do not allow the exclusion or limitation of * -* incidental or consequential damages, so this exclusion and * -* limitation may not apply to You. * -* * -************************************************************************ - -8. Litigation -------------- - -Any litigation relating to this License may be brought only in the -courts of a jurisdiction where the defendant maintains its principal -place of business and such litigation shall be governed by laws of that -jurisdiction, without reference to its conflict-of-law provisions. -Nothing in this Section shall prevent a party's ability to bring -cross-claims or counter-claims. - -9. Miscellaneous ----------------- - -This License represents the complete agreement concerning the subject -matter hereof. If any provision of this License is held to be -unenforceable, such provision shall be reformed only to the extent -necessary to make it enforceable. 
Any law or regulation which provides -that the language of a contract shall be construed against the drafter -shall not be used to construe this License against a Contributor. - -10. Versions of the License ---------------------------- - -10.1. New Versions - -Mozilla Foundation is the license steward. Except as provided in Section -10.3, no one other than the license steward has the right to modify or -publish new versions of this License. Each version will be given a -distinguishing version number. - -10.2. Effect of New Versions - -You may distribute the Covered Software under the terms of the version -of the License under which You originally received the Covered Software, -or under the terms of any subsequent version published by the license -steward. - -10.3. Modified Versions - -If you create software not governed by this License, and you want to -create a new license for such software, you may create and use a -modified version of this License if you rename the license and remove -any references to the name of the license steward (except to note that -such modified license differs from this License). - -10.4. Distributing Source Code Form that is Incompatible With Secondary -Licenses - -If You choose to distribute Source Code Form that is Incompatible With -Secondary Licenses under the terms of this version of the License, the -notice described in Exhibit B of this License must be attached. - -Exhibit A - Source Code Form License Notice -------------------------------------------- - - This Source Code Form is subject to the terms of the Mozilla Public - License, v. 2.0. If a copy of the MPL was not distributed with this - file, You can obtain one at http://mozilla.org/MPL/2.0/. - -If it is not possible or desirable to put the notice in a particular -file, then You may include the notice in a location (such as a LICENSE -file in a relevant directory) where a recipient would be likely to look -for such a notice. 
- -You may add additional accurate notices of copyright ownership. - -Exhibit B - "Incompatible With Secondary Licenses" Notice ---------------------------------------------------------- - - This Source Code Form is "Incompatible With Secondary Licenses", as - defined by the Mozilla Public License, v. 2.0. diff --git a/client/rest/licenses/httpcore-NOTICE.txt b/client/rest/licenses/httpcore-NOTICE.txt deleted file mode 100644 index 91e5c40c4c6..00000000000 --- a/client/rest/licenses/httpcore-NOTICE.txt +++ /dev/null @@ -1,6 +0,0 @@ -Apache HttpComponents Client -Copyright 1999-2016 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). - diff --git a/client/rest/licenses/httpcore-nio-LICENSE.txt b/client/rest/licenses/httpcore-nio-LICENSE.txt deleted file mode 100644 index 32f01eda18f..00000000000 --- a/client/rest/licenses/httpcore-nio-LICENSE.txt +++ /dev/null @@ -1,558 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - -========================================================================= - -This project includes Public Suffix List copied from - -licensed under the terms of the Mozilla Public License, v. 2.0 - -Full license text: - -Mozilla Public License Version 2.0 -================================== - -1. Definitions --------------- - -1.1. "Contributor" - means each individual or legal entity that creates, contributes to - the creation of, or owns Covered Software. - -1.2. 
"Contributor Version" - means the combination of the Contributions of others (if any) used - by a Contributor and that particular Contributor's Contribution. - -1.3. "Contribution" - means Covered Software of a particular Contributor. - -1.4. "Covered Software" - means Source Code Form to which the initial Contributor has attached - the notice in Exhibit A, the Executable Form of such Source Code - Form, and Modifications of such Source Code Form, in each case - including portions thereof. - -1.5. "Incompatible With Secondary Licenses" - means - - (a) that the initial Contributor has attached the notice described - in Exhibit B to the Covered Software; or - - (b) that the Covered Software was made available under the terms of - version 1.1 or earlier of the License, but not also under the - terms of a Secondary License. - -1.6. "Executable Form" - means any form of the work other than Source Code Form. - -1.7. "Larger Work" - means a work that combines Covered Software with other material, in - a separate file or files, that is not Covered Software. - -1.8. "License" - means this document. - -1.9. "Licensable" - means having the right to grant, to the maximum extent possible, - whether at the time of the initial grant or subsequently, any and - all of the rights conveyed by this License. - -1.10. "Modifications" - means any of the following: - - (a) any file in Source Code Form that results from an addition to, - deletion from, or modification of the contents of Covered - Software; or - - (b) any new file in Source Code Form that contains any Covered - Software. - -1.11. "Patent Claims" of a Contributor - means any patent claim(s), including without limitation, method, - process, and apparatus claims, in any patent Licensable by such - Contributor that would be infringed, but for the grant of the - License, by the making, using, selling, offering for sale, having - made, import, or transfer of either its Contributions or its - Contributor Version. - -1.12. 
"Secondary License" - means either the GNU General Public License, Version 2.0, the GNU - Lesser General Public License, Version 2.1, the GNU Affero General - Public License, Version 3.0, or any later versions of those - licenses. - -1.13. "Source Code Form" - means the form of the work preferred for making modifications. - -1.14. "You" (or "Your") - means an individual or a legal entity exercising rights under this - License. For legal entities, "You" includes any entity that - controls, is controlled by, or is under common control with You. For - purposes of this definition, "control" means (a) the power, direct - or indirect, to cause the direction or management of such entity, - whether by contract or otherwise, or (b) ownership of more than - fifty percent (50%) of the outstanding shares or beneficial - ownership of such entity. - -2. License Grants and Conditions --------------------------------- - -2.1. Grants - -Each Contributor hereby grants You a world-wide, royalty-free, -non-exclusive license: - -(a) under intellectual property rights (other than patent or trademark) - Licensable by such Contributor to use, reproduce, make available, - modify, display, perform, distribute, and otherwise exploit its - Contributions, either on an unmodified basis, with Modifications, or - as part of a Larger Work; and - -(b) under Patent Claims of such Contributor to make, use, sell, offer - for sale, have made, import, and otherwise transfer either its - Contributions or its Contributor Version. - -2.2. Effective Date - -The licenses granted in Section 2.1 with respect to any Contribution -become effective for each Contribution on the date the Contributor first -distributes such Contribution. - -2.3. Limitations on Grant Scope - -The licenses granted in this Section 2 are the only rights granted under -this License. No additional rights or licenses will be implied from the -distribution or licensing of Covered Software under this License. 
-Notwithstanding Section 2.1(b) above, no patent license is granted by a -Contributor: - -(a) for any code that a Contributor has removed from Covered Software; - or - -(b) for infringements caused by: (i) Your and any other third party's - modifications of Covered Software, or (ii) the combination of its - Contributions with other software (except as part of its Contributor - Version); or - -(c) under Patent Claims infringed by Covered Software in the absence of - its Contributions. - -This License does not grant any rights in the trademarks, service marks, -or logos of any Contributor (except as may be necessary to comply with -the notice requirements in Section 3.4). - -2.4. Subsequent Licenses - -No Contributor makes additional grants as a result of Your choice to -distribute the Covered Software under a subsequent version of this -License (see Section 10.2) or under the terms of a Secondary License (if -permitted under the terms of Section 3.3). - -2.5. Representation - -Each Contributor represents that the Contributor believes its -Contributions are its original creation(s) or it has sufficient rights -to grant the rights to its Contributions conveyed by this License. - -2.6. Fair Use - -This License is not intended to limit any rights You have under -applicable copyright doctrines of fair use, fair dealing, or other -equivalents. - -2.7. Conditions - -Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted -in Section 2.1. - -3. Responsibilities -------------------- - -3.1. Distribution of Source Form - -All distribution of Covered Software in Source Code Form, including any -Modifications that You create or to which You contribute, must be under -the terms of this License. You must inform recipients that the Source -Code Form of the Covered Software is governed by the terms of this -License, and how they can obtain a copy of this License. You may not -attempt to alter or restrict the recipients' rights in the Source Code -Form. - -3.2. 
Distribution of Executable Form - -If You distribute Covered Software in Executable Form then: - -(a) such Covered Software must also be made available in Source Code - Form, as described in Section 3.1, and You must inform recipients of - the Executable Form how they can obtain a copy of such Source Code - Form by reasonable means in a timely manner, at a charge no more - than the cost of distribution to the recipient; and - -(b) You may distribute such Executable Form under the terms of this - License, or sublicense it under different terms, provided that the - license for the Executable Form does not attempt to limit or alter - the recipients' rights in the Source Code Form under this License. - -3.3. Distribution of a Larger Work - -You may create and distribute a Larger Work under terms of Your choice, -provided that You also comply with the requirements of this License for -the Covered Software. If the Larger Work is a combination of Covered -Software with a work governed by one or more Secondary Licenses, and the -Covered Software is not Incompatible With Secondary Licenses, this -License permits You to additionally distribute such Covered Software -under the terms of such Secondary License(s), so that the recipient of -the Larger Work may, at their option, further distribute the Covered -Software under the terms of either this License or such Secondary -License(s). - -3.4. Notices - -You may not remove or alter the substance of any license notices -(including copyright notices, patent notices, disclaimers of warranty, -or limitations of liability) contained within the Source Code Form of -the Covered Software, except that You may alter any license notices to -the extent required to remedy known factual inaccuracies. - -3.5. Application of Additional Terms - -You may choose to offer, and to charge a fee for, warranty, support, -indemnity or liability obligations to one or more recipients of Covered -Software. 
However, You may do so only on Your own behalf, and not on -behalf of any Contributor. You must make it absolutely clear that any -such warranty, support, indemnity, or liability obligation is offered by -You alone, and You hereby agree to indemnify every Contributor for any -liability incurred by such Contributor as a result of warranty, support, -indemnity or liability terms You offer. You may include additional -disclaimers of warranty and limitations of liability specific to any -jurisdiction. - -4. Inability to Comply Due to Statute or Regulation ---------------------------------------------------- - -If it is impossible for You to comply with any of the terms of this -License with respect to some or all of the Covered Software due to -statute, judicial order, or regulation then You must: (a) comply with -the terms of this License to the maximum extent possible; and (b) -describe the limitations and the code they affect. Such description must -be placed in a text file included with all distributions of the Covered -Software under this License. Except to the extent prohibited by statute -or regulation, such description must be sufficiently detailed for a -recipient of ordinary skill to be able to understand it. - -5. Termination --------------- - -5.1. The rights granted under this License will terminate automatically -if You fail to comply with any of its terms. However, if You become -compliant, then the rights granted under this License from a particular -Contributor are reinstated (a) provisionally, unless and until such -Contributor explicitly and finally terminates Your grants, and (b) on an -ongoing basis, if such Contributor fails to notify You of the -non-compliance by some reasonable means prior to 60 days after You have -come back into compliance. 
Moreover, Your grants from a particular -Contributor are reinstated on an ongoing basis if such Contributor -notifies You of the non-compliance by some reasonable means, this is the -first time You have received notice of non-compliance with this License -from such Contributor, and You become compliant prior to 30 days after -Your receipt of the notice. - -5.2. If You initiate litigation against any entity by asserting a patent -infringement claim (excluding declaratory judgment actions, -counter-claims, and cross-claims) alleging that a Contributor Version -directly or indirectly infringes any patent, then the rights granted to -You by any and all Contributors for the Covered Software under Section -2.1 of this License shall terminate. - -5.3. In the event of termination under Sections 5.1 or 5.2 above, all -end user license agreements (excluding distributors and resellers) which -have been validly granted by You or Your distributors under this License -prior to termination shall survive termination. - -************************************************************************ -* * -* 6. Disclaimer of Warranty * -* ------------------------- * -* * -* Covered Software is provided under this License on an "as is" * -* basis, without warranty of any kind, either expressed, implied, or * -* statutory, including, without limitation, warranties that the * -* Covered Software is free of defects, merchantable, fit for a * -* particular purpose or non-infringing. The entire risk as to the * -* quality and performance of the Covered Software is with You. * -* Should any Covered Software prove defective in any respect, You * -* (not any Contributor) assume the cost of any necessary servicing, * -* repair, or correction. This disclaimer of warranty constitutes an * -* essential part of this License. No use of any Covered Software is * -* authorized under this License except under this disclaimer. 
* -* * -************************************************************************ - -************************************************************************ -* * -* 7. Limitation of Liability * -* -------------------------- * -* * -* Under no circumstances and under no legal theory, whether tort * -* (including negligence), contract, or otherwise, shall any * -* Contributor, or anyone who distributes Covered Software as * -* permitted above, be liable to You for any direct, indirect, * -* special, incidental, or consequential damages of any character * -* including, without limitation, damages for lost profits, loss of * -* goodwill, work stoppage, computer failure or malfunction, or any * -* and all other commercial damages or losses, even if such party * -* shall have been informed of the possibility of such damages. This * -* limitation of liability shall not apply to liability for death or * -* personal injury resulting from such party's negligence to the * -* extent applicable law prohibits such limitation. Some * -* jurisdictions do not allow the exclusion or limitation of * -* incidental or consequential damages, so this exclusion and * -* limitation may not apply to You. * -* * -************************************************************************ - -8. Litigation -------------- - -Any litigation relating to this License may be brought only in the -courts of a jurisdiction where the defendant maintains its principal -place of business and such litigation shall be governed by laws of that -jurisdiction, without reference to its conflict-of-law provisions. -Nothing in this Section shall prevent a party's ability to bring -cross-claims or counter-claims. - -9. Miscellaneous ----------------- - -This License represents the complete agreement concerning the subject -matter hereof. If any provision of this License is held to be -unenforceable, such provision shall be reformed only to the extent -necessary to make it enforceable. 
Any law or regulation which provides -that the language of a contract shall be construed against the drafter -shall not be used to construe this License against a Contributor. - -10. Versions of the License ---------------------------- - -10.1. New Versions - -Mozilla Foundation is the license steward. Except as provided in Section -10.3, no one other than the license steward has the right to modify or -publish new versions of this License. Each version will be given a -distinguishing version number. - -10.2. Effect of New Versions - -You may distribute the Covered Software under the terms of the version -of the License under which You originally received the Covered Software, -or under the terms of any subsequent version published by the license -steward. - -10.3. Modified Versions - -If you create software not governed by this License, and you want to -create a new license for such software, you may create and use a -modified version of this License if you rename the license and remove -any references to the name of the license steward (except to note that -such modified license differs from this License). - -10.4. Distributing Source Code Form that is Incompatible With Secondary -Licenses - -If You choose to distribute Source Code Form that is Incompatible With -Secondary Licenses under the terms of this version of the License, the -notice described in Exhibit B of this License must be attached. - -Exhibit A - Source Code Form License Notice -------------------------------------------- - - This Source Code Form is subject to the terms of the Mozilla Public - License, v. 2.0. If a copy of the MPL was not distributed with this - file, You can obtain one at http://mozilla.org/MPL/2.0/. - -If it is not possible or desirable to put the notice in a particular -file, then You may include the notice in a location (such as a LICENSE -file in a relevant directory) where a recipient would be likely to look -for such a notice. 
- -You may add additional accurate notices of copyright ownership. - -Exhibit B - "Incompatible With Secondary Licenses" Notice ---------------------------------------------------------- - - This Source Code Form is "Incompatible With Secondary Licenses", as - defined by the Mozilla Public License, v. 2.0. diff --git a/client/rest/licenses/httpcore-nio-NOTICE.txt b/client/rest/licenses/httpcore-nio-NOTICE.txt deleted file mode 100644 index 91e5c40c4c6..00000000000 --- a/client/rest/licenses/httpcore-nio-NOTICE.txt +++ /dev/null @@ -1,6 +0,0 @@ -Apache HttpComponents Client -Copyright 1999-2016 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). - diff --git a/client/sniffer/build.gradle b/client/sniffer/build.gradle index 7cf16ee85d8..639871cccb3 100644 --- a/client/sniffer/build.gradle +++ b/client/sniffer/build.gradle @@ -58,6 +58,11 @@ forbiddenApisTest { signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] } +dependencyLicenses { + mapping from: /http.*/, to: 'httpclient' + mapping from: /commons-.*/, to: 'commons' +} + //JarHell is part of es core, which we don't want to pull in jarHell.enabled=false diff --git a/client/rest/licenses/commons-logging-LICENSE.txt b/client/sniffer/licenses/commons-LICENSE.txt similarity index 100% rename from client/rest/licenses/commons-logging-LICENSE.txt rename to client/sniffer/licenses/commons-LICENSE.txt diff --git a/client/sniffer/licenses/commons-codec-NOTICE.txt b/client/sniffer/licenses/commons-NOTICE.txt similarity index 100% rename from client/sniffer/licenses/commons-codec-NOTICE.txt rename to client/sniffer/licenses/commons-NOTICE.txt diff --git a/client/sniffer/licenses/commons-codec-LICENSE.txt b/client/sniffer/licenses/commons-codec-LICENSE.txt deleted file mode 100644 index d6456956733..00000000000 --- a/client/sniffer/licenses/commons-codec-LICENSE.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License 
- Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/client/sniffer/licenses/commons-logging-LICENSE.txt b/client/sniffer/licenses/commons-logging-LICENSE.txt deleted file mode 100644 index d6456956733..00000000000 --- a/client/sniffer/licenses/commons-logging-LICENSE.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/client/sniffer/licenses/commons-logging-NOTICE.txt b/client/sniffer/licenses/commons-logging-NOTICE.txt deleted file mode 100644 index 556bd03951d..00000000000 --- a/client/sniffer/licenses/commons-logging-NOTICE.txt +++ /dev/null @@ -1,6 +0,0 @@ -Apache Commons Logging -Copyright 2003-2014 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). 
- diff --git a/client/sniffer/licenses/httpcore-LICENSE.txt b/client/sniffer/licenses/httpcore-LICENSE.txt deleted file mode 100644 index 32f01eda18f..00000000000 --- a/client/sniffer/licenses/httpcore-LICENSE.txt +++ /dev/null @@ -1,558 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. 
Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative 
Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - -========================================================================= - -This project includes Public Suffix List copied from - -licensed under the terms of the Mozilla Public License, v. 2.0 - -Full license text: - -Mozilla Public License Version 2.0 -================================== - -1. Definitions --------------- - -1.1. "Contributor" - means each individual or legal entity that creates, contributes to - the creation of, or owns Covered Software. - -1.2. "Contributor Version" - means the combination of the Contributions of others (if any) used - by a Contributor and that particular Contributor's Contribution. - -1.3. "Contribution" - means Covered Software of a particular Contributor. - -1.4. "Covered Software" - means Source Code Form to which the initial Contributor has attached - the notice in Exhibit A, the Executable Form of such Source Code - Form, and Modifications of such Source Code Form, in each case - including portions thereof. - -1.5. "Incompatible With Secondary Licenses" - means - - (a) that the initial Contributor has attached the notice described - in Exhibit B to the Covered Software; or - - (b) that the Covered Software was made available under the terms of - version 1.1 or earlier of the License, but not also under the - terms of a Secondary License. - -1.6. "Executable Form" - means any form of the work other than Source Code Form. - -1.7. "Larger Work" - means a work that combines Covered Software with other material, in - a separate file or files, that is not Covered Software. - -1.8. "License" - means this document. - -1.9. "Licensable" - means having the right to grant, to the maximum extent possible, - whether at the time of the initial grant or subsequently, any and - all of the rights conveyed by this License. - -1.10. 
"Modifications" - means any of the following: - - (a) any file in Source Code Form that results from an addition to, - deletion from, or modification of the contents of Covered - Software; or - - (b) any new file in Source Code Form that contains any Covered - Software. - -1.11. "Patent Claims" of a Contributor - means any patent claim(s), including without limitation, method, - process, and apparatus claims, in any patent Licensable by such - Contributor that would be infringed, but for the grant of the - License, by the making, using, selling, offering for sale, having - made, import, or transfer of either its Contributions or its - Contributor Version. - -1.12. "Secondary License" - means either the GNU General Public License, Version 2.0, the GNU - Lesser General Public License, Version 2.1, the GNU Affero General - Public License, Version 3.0, or any later versions of those - licenses. - -1.13. "Source Code Form" - means the form of the work preferred for making modifications. - -1.14. "You" (or "Your") - means an individual or a legal entity exercising rights under this - License. For legal entities, "You" includes any entity that - controls, is controlled by, or is under common control with You. For - purposes of this definition, "control" means (a) the power, direct - or indirect, to cause the direction or management of such entity, - whether by contract or otherwise, or (b) ownership of more than - fifty percent (50%) of the outstanding shares or beneficial - ownership of such entity. - -2. License Grants and Conditions --------------------------------- - -2.1. 
Grants - -Each Contributor hereby grants You a world-wide, royalty-free, -non-exclusive license: - -(a) under intellectual property rights (other than patent or trademark) - Licensable by such Contributor to use, reproduce, make available, - modify, display, perform, distribute, and otherwise exploit its - Contributions, either on an unmodified basis, with Modifications, or - as part of a Larger Work; and - -(b) under Patent Claims of such Contributor to make, use, sell, offer - for sale, have made, import, and otherwise transfer either its - Contributions or its Contributor Version. - -2.2. Effective Date - -The licenses granted in Section 2.1 with respect to any Contribution -become effective for each Contribution on the date the Contributor first -distributes such Contribution. - -2.3. Limitations on Grant Scope - -The licenses granted in this Section 2 are the only rights granted under -this License. No additional rights or licenses will be implied from the -distribution or licensing of Covered Software under this License. -Notwithstanding Section 2.1(b) above, no patent license is granted by a -Contributor: - -(a) for any code that a Contributor has removed from Covered Software; - or - -(b) for infringements caused by: (i) Your and any other third party's - modifications of Covered Software, or (ii) the combination of its - Contributions with other software (except as part of its Contributor - Version); or - -(c) under Patent Claims infringed by Covered Software in the absence of - its Contributions. - -This License does not grant any rights in the trademarks, service marks, -or logos of any Contributor (except as may be necessary to comply with -the notice requirements in Section 3.4). - -2.4. 
Subsequent Licenses - -No Contributor makes additional grants as a result of Your choice to -distribute the Covered Software under a subsequent version of this -License (see Section 10.2) or under the terms of a Secondary License (if -permitted under the terms of Section 3.3). - -2.5. Representation - -Each Contributor represents that the Contributor believes its -Contributions are its original creation(s) or it has sufficient rights -to grant the rights to its Contributions conveyed by this License. - -2.6. Fair Use - -This License is not intended to limit any rights You have under -applicable copyright doctrines of fair use, fair dealing, or other -equivalents. - -2.7. Conditions - -Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted -in Section 2.1. - -3. Responsibilities -------------------- - -3.1. Distribution of Source Form - -All distribution of Covered Software in Source Code Form, including any -Modifications that You create or to which You contribute, must be under -the terms of this License. You must inform recipients that the Source -Code Form of the Covered Software is governed by the terms of this -License, and how they can obtain a copy of this License. You may not -attempt to alter or restrict the recipients' rights in the Source Code -Form. - -3.2. 
Distribution of Executable Form - -If You distribute Covered Software in Executable Form then: - -(a) such Covered Software must also be made available in Source Code - Form, as described in Section 3.1, and You must inform recipients of - the Executable Form how they can obtain a copy of such Source Code - Form by reasonable means in a timely manner, at a charge no more - than the cost of distribution to the recipient; and - -(b) You may distribute such Executable Form under the terms of this - License, or sublicense it under different terms, provided that the - license for the Executable Form does not attempt to limit or alter - the recipients' rights in the Source Code Form under this License. - -3.3. Distribution of a Larger Work - -You may create and distribute a Larger Work under terms of Your choice, -provided that You also comply with the requirements of this License for -the Covered Software. If the Larger Work is a combination of Covered -Software with a work governed by one or more Secondary Licenses, and the -Covered Software is not Incompatible With Secondary Licenses, this -License permits You to additionally distribute such Covered Software -under the terms of such Secondary License(s), so that the recipient of -the Larger Work may, at their option, further distribute the Covered -Software under the terms of either this License or such Secondary -License(s). - -3.4. Notices - -You may not remove or alter the substance of any license notices -(including copyright notices, patent notices, disclaimers of warranty, -or limitations of liability) contained within the Source Code Form of -the Covered Software, except that You may alter any license notices to -the extent required to remedy known factual inaccuracies. - -3.5. Application of Additional Terms - -You may choose to offer, and to charge a fee for, warranty, support, -indemnity or liability obligations to one or more recipients of Covered -Software. 
However, You may do so only on Your own behalf, and not on -behalf of any Contributor. You must make it absolutely clear that any -such warranty, support, indemnity, or liability obligation is offered by -You alone, and You hereby agree to indemnify every Contributor for any -liability incurred by such Contributor as a result of warranty, support, -indemnity or liability terms You offer. You may include additional -disclaimers of warranty and limitations of liability specific to any -jurisdiction. - -4. Inability to Comply Due to Statute or Regulation ---------------------------------------------------- - -If it is impossible for You to comply with any of the terms of this -License with respect to some or all of the Covered Software due to -statute, judicial order, or regulation then You must: (a) comply with -the terms of this License to the maximum extent possible; and (b) -describe the limitations and the code they affect. Such description must -be placed in a text file included with all distributions of the Covered -Software under this License. Except to the extent prohibited by statute -or regulation, such description must be sufficiently detailed for a -recipient of ordinary skill to be able to understand it. - -5. Termination --------------- - -5.1. The rights granted under this License will terminate automatically -if You fail to comply with any of its terms. However, if You become -compliant, then the rights granted under this License from a particular -Contributor are reinstated (a) provisionally, unless and until such -Contributor explicitly and finally terminates Your grants, and (b) on an -ongoing basis, if such Contributor fails to notify You of the -non-compliance by some reasonable means prior to 60 days after You have -come back into compliance. 
Moreover, Your grants from a particular -Contributor are reinstated on an ongoing basis if such Contributor -notifies You of the non-compliance by some reasonable means, this is the -first time You have received notice of non-compliance with this License -from such Contributor, and You become compliant prior to 30 days after -Your receipt of the notice. - -5.2. If You initiate litigation against any entity by asserting a patent -infringement claim (excluding declaratory judgment actions, -counter-claims, and cross-claims) alleging that a Contributor Version -directly or indirectly infringes any patent, then the rights granted to -You by any and all Contributors for the Covered Software under Section -2.1 of this License shall terminate. - -5.3. In the event of termination under Sections 5.1 or 5.2 above, all -end user license agreements (excluding distributors and resellers) which -have been validly granted by You or Your distributors under this License -prior to termination shall survive termination. - -************************************************************************ -* * -* 6. Disclaimer of Warranty * -* ------------------------- * -* * -* Covered Software is provided under this License on an "as is" * -* basis, without warranty of any kind, either expressed, implied, or * -* statutory, including, without limitation, warranties that the * -* Covered Software is free of defects, merchantable, fit for a * -* particular purpose or non-infringing. The entire risk as to the * -* quality and performance of the Covered Software is with You. * -* Should any Covered Software prove defective in any respect, You * -* (not any Contributor) assume the cost of any necessary servicing, * -* repair, or correction. This disclaimer of warranty constitutes an * -* essential part of this License. No use of any Covered Software is * -* authorized under this License except under this disclaimer. 
* -* * -************************************************************************ - -************************************************************************ -* * -* 7. Limitation of Liability * -* -------------------------- * -* * -* Under no circumstances and under no legal theory, whether tort * -* (including negligence), contract, or otherwise, shall any * -* Contributor, or anyone who distributes Covered Software as * -* permitted above, be liable to You for any direct, indirect, * -* special, incidental, or consequential damages of any character * -* including, without limitation, damages for lost profits, loss of * -* goodwill, work stoppage, computer failure or malfunction, or any * -* and all other commercial damages or losses, even if such party * -* shall have been informed of the possibility of such damages. This * -* limitation of liability shall not apply to liability for death or * -* personal injury resulting from such party's negligence to the * -* extent applicable law prohibits such limitation. Some * -* jurisdictions do not allow the exclusion or limitation of * -* incidental or consequential damages, so this exclusion and * -* limitation may not apply to You. * -* * -************************************************************************ - -8. Litigation -------------- - -Any litigation relating to this License may be brought only in the -courts of a jurisdiction where the defendant maintains its principal -place of business and such litigation shall be governed by laws of that -jurisdiction, without reference to its conflict-of-law provisions. -Nothing in this Section shall prevent a party's ability to bring -cross-claims or counter-claims. - -9. Miscellaneous ----------------- - -This License represents the complete agreement concerning the subject -matter hereof. If any provision of this License is held to be -unenforceable, such provision shall be reformed only to the extent -necessary to make it enforceable. 
Any law or regulation which provides -that the language of a contract shall be construed against the drafter -shall not be used to construe this License against a Contributor. - -10. Versions of the License ---------------------------- - -10.1. New Versions - -Mozilla Foundation is the license steward. Except as provided in Section -10.3, no one other than the license steward has the right to modify or -publish new versions of this License. Each version will be given a -distinguishing version number. - -10.2. Effect of New Versions - -You may distribute the Covered Software under the terms of the version -of the License under which You originally received the Covered Software, -or under the terms of any subsequent version published by the license -steward. - -10.3. Modified Versions - -If you create software not governed by this License, and you want to -create a new license for such software, you may create and use a -modified version of this License if you rename the license and remove -any references to the name of the license steward (except to note that -such modified license differs from this License). - -10.4. Distributing Source Code Form that is Incompatible With Secondary -Licenses - -If You choose to distribute Source Code Form that is Incompatible With -Secondary Licenses under the terms of this version of the License, the -notice described in Exhibit B of this License must be attached. - -Exhibit A - Source Code Form License Notice -------------------------------------------- - - This Source Code Form is subject to the terms of the Mozilla Public - License, v. 2.0. If a copy of the MPL was not distributed with this - file, You can obtain one at http://mozilla.org/MPL/2.0/. - -If it is not possible or desirable to put the notice in a particular -file, then You may include the notice in a location (such as a LICENSE -file in a relevant directory) where a recipient would be likely to look -for such a notice. 
- -You may add additional accurate notices of copyright ownership. - -Exhibit B - "Incompatible With Secondary Licenses" Notice ---------------------------------------------------------- - - This Source Code Form is "Incompatible With Secondary Licenses", as - defined by the Mozilla Public License, v. 2.0. diff --git a/client/sniffer/licenses/httpcore-NOTICE.txt b/client/sniffer/licenses/httpcore-NOTICE.txt deleted file mode 100644 index 91e5c40c4c6..00000000000 --- a/client/sniffer/licenses/httpcore-NOTICE.txt +++ /dev/null @@ -1,6 +0,0 @@ -Apache HttpComponents Client -Copyright 1999-2016 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). - From 1bb33cf572c04e7e61dafb2826530795e8edbb95 Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 18 Jul 2016 18:13:14 +0200 Subject: [PATCH 17/93] Remove RestClient#JSON_CONTENT_TYPE constant, already available in ContentType class --- .../src/main/java/org/elasticsearch/client/RestClient.java | 3 --- .../java/org/elasticsearch/http/DeprecationHttpIT.java | 7 +++---- .../java/org/elasticsearch/http/HttpCompressionIT.java | 3 ++- .../org/elasticsearch/test/rest/client/RestTestClient.java | 5 +++-- 4 files changed, 8 insertions(+), 10 deletions(-) diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index 56baa35598f..6b4e7f554fb 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -20,7 +20,6 @@ package org.elasticsearch.client; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.http.Consts; import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; @@ -37,7 +36,6 @@ import org.apache.http.client.methods.HttpRequestBase; import 
org.apache.http.client.methods.HttpTrace; import org.apache.http.client.utils.URIBuilder; import org.apache.http.concurrent.FutureCallback; -import org.apache.http.entity.ContentType; import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; import org.apache.http.nio.client.methods.HttpAsyncMethods; import org.apache.http.nio.protocol.HttpAsyncRequestProducer; @@ -79,7 +77,6 @@ import java.util.concurrent.atomic.AtomicInteger; public final class RestClient implements Closeable { private static final Log logger = LogFactory.getLog(RestClient.class); - public static ContentType JSON_CONTENT_TYPE = ContentType.create("application/json", Consts.UTF_8); private final CloseableHttpAsyncClient client; //we don't rely on default headers supported by HttpAsyncClient as those cannot be replaced diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java index 4455eaab258..40c757a3b1f 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java @@ -20,16 +20,15 @@ package org.elasticsearch.http; import org.apache.http.Header; import org.apache.http.HttpEntity; +import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.elasticsearch.client.Response; -import org.elasticsearch.client.RestClient; import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.plugins.Plugin; - import org.hamcrest.Matcher; import java.io.IOException; @@ -103,7 +102,7 @@ public class DeprecationHttpIT extends HttpSmokeTestCase { // trigger all index deprecations Response response = 
getRestClient().performRequest("GET", "/" + commaSeparatedIndices + "/_search", - Collections.emptyMap(), new StringEntity(body, RestClient.JSON_CONTENT_TYPE)); + Collections.emptyMap(), new StringEntity(body, ContentType.APPLICATION_JSON)); assertThat(response.getStatusLine().getStatusCode(), equalTo(OK.getStatus())); final List deprecatedWarnings = getWarningHeaders(response.getHeaders()); @@ -201,7 +200,7 @@ public class DeprecationHttpIT extends HttpSmokeTestCase { builder.endArray().endObject(); - return new StringEntity(builder.string(), RestClient.JSON_CONTENT_TYPE); + return new StringEntity(builder.string(), ContentType.APPLICATION_JSON); } } diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java index ca637a78555..a6d9f833d22 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.http; import org.apache.http.HttpHeaders; +import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.message.BasicHeader; import org.elasticsearch.client.Response; @@ -35,7 +36,7 @@ public class HttpCompressionIT extends ESIntegTestCase { " \"first name\": \"Steve\",\n" + " \"last name\": \"Jobs\"\n" + " }\n" + - "}", RestClient.JSON_CONTENT_TYPE); + "}", ContentType.APPLICATION_JSON); @Override protected boolean ignoreExternalCluster() { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java index 1ba5960bf9a..dfa59509790 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java @@ -22,6 +22,7 @@ 
import com.carrotsearch.randomizedtesting.RandomizedTest; import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; +import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.message.BasicHeader; import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; @@ -135,7 +136,7 @@ public class RestTestClient implements Closeable { String path = "/"+ Objects.requireNonNull(queryStringParams.remove("path"), "Path must be set to use raw request"); HttpEntity entity = null; if (body != null && body.length() > 0) { - entity = new StringEntity(body, RestClient.JSON_CONTENT_TYPE); + entity = new StringEntity(body, ContentType.APPLICATION_JSON); } // And everything else is a url parameter! try { @@ -204,7 +205,7 @@ public class RestTestClient implements Closeable { requestMethod = "GET"; } else { requestMethod = RandomizedTest.randomFrom(supportedMethods); - requestBody = new StringEntity(body, RestClient.JSON_CONTENT_TYPE); + requestBody = new StringEntity(body, ContentType.APPLICATION_JSON); } } else { if (restApi.isBodyRequired()) { From 8eccdff9ad623925c7f33dd8b576d76648ed226f Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 19 Jul 2016 14:51:46 +0200 Subject: [PATCH 18/93] add HeapBufferedAsyncResponseConsumer with configurable max buffer size We keep the default async client behaviour like in BasicAsyncResponseConsumer, but we lower the maximum size of the buffer from Integer.MAX_VALUE (2GB) to 10 MB. This way users will realize they are buffering big responses in heap hence they'll know they have to do something about it, either write their own response consumer or increase the buffer size limit by providing their manually creeted instance of HeapBufferedAsyncResponseConsumer (constructor accept a bufferLimit int argument). 
--- .../HeapBufferedAsyncResponseConsumer.java | 108 ++++++++++++++++++ .../client/ResponseException.java | 2 +- .../org/elasticsearch/client/RestClient.java | 34 +++++- ...eapBufferedAsyncResponseConsumerTests.java | 104 +++++++++++++++++ 4 files changed, 243 insertions(+), 5 deletions(-) create mode 100644 client/rest/src/main/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumer.java create mode 100644 client/rest/src/test/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumerTests.java diff --git a/client/rest/src/main/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumer.java b/client/rest/src/main/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumer.java new file mode 100644 index 00000000000..61e06132212 --- /dev/null +++ b/client/rest/src/main/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumer.java @@ -0,0 +1,108 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import org.apache.http.ContentTooLongException; +import org.apache.http.HttpEntity; +import org.apache.http.HttpException; +import org.apache.http.HttpResponse; +import org.apache.http.entity.ContentType; +import org.apache.http.nio.ContentDecoder; +import org.apache.http.nio.IOControl; +import org.apache.http.nio.entity.ContentBufferEntity; +import org.apache.http.nio.protocol.AbstractAsyncResponseConsumer; +import org.apache.http.nio.util.ByteBufferAllocator; +import org.apache.http.nio.util.HeapByteBufferAllocator; +import org.apache.http.nio.util.SimpleInputBuffer; +import org.apache.http.protocol.HttpContext; + +import java.io.IOException; + +/** + * Default implementation of {@link org.apache.http.nio.protocol.HttpAsyncResponseConsumer}. Buffers the whole + * response content in heap memory, meaning that the size of the buffer is equal to the content-length of the response. + * Limits the size of responses that can be read to {@link #DEFAULT_BUFFER_LIMIT} by default, configurable value. + * Throws an exception in case the entity is longer than the configured buffer limit. 
+ */ +public class HeapBufferedAsyncResponseConsumer extends AbstractAsyncResponseConsumer { + + //default buffer limit is 10MB + public static final int DEFAULT_BUFFER_LIMIT = 10 * 1024 * 1024; + + private final int bufferLimit; + private volatile HttpResponse response; + private volatile SimpleInputBuffer buf; + + /** + * Creates a new instance of this consumer with a buffer limit of {@link #DEFAULT_BUFFER_LIMIT} + */ + public HeapBufferedAsyncResponseConsumer() { + this.bufferLimit = DEFAULT_BUFFER_LIMIT; + } + + /** + * Creates a new instance of this consumer with the provided buffer limit + */ + public HeapBufferedAsyncResponseConsumer(int bufferLimit) { + this.bufferLimit = bufferLimit; + } + + @Override + protected void onResponseReceived(HttpResponse response) throws HttpException, IOException { + this.response = response; + } + + @Override + protected void onEntityEnclosed(HttpEntity entity, ContentType contentType) throws IOException { + long len = entity.getContentLength(); + if (len > bufferLimit) { + throw new ContentTooLongException("entity content is too long [" + len + + "] for the configured buffer limit [" + bufferLimit + "]"); + } + if (len < 0) { + len = 4096; + } + this.buf = new SimpleInputBuffer((int) len, getByteBufferAllocator()); + this.response.setEntity(new ContentBufferEntity(entity, this.buf)); + } + + /** + * Returns the instance of {@link ByteBufferAllocator} to use for content buffering. + * Allows to plug in any {@link ByteBufferAllocator} implementation. 
+ */ + protected ByteBufferAllocator getByteBufferAllocator() { + return HeapByteBufferAllocator.INSTANCE; + } + + @Override + protected void onContentReceived(ContentDecoder decoder, IOControl ioctrl) throws IOException { + this.buf.consumeContent(decoder); + } + + @Override + protected HttpResponse buildResult(HttpContext context) throws Exception { + return response; + } + + @Override + protected void releaseResources() { + response = null; + } +} diff --git a/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java b/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java index 5b6c4f1f0e7..35e31d921c7 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java +++ b/client/rest/src/main/java/org/elasticsearch/client/ResponseException.java @@ -25,7 +25,7 @@ import java.io.IOException; * Exception thrown when an elasticsearch node responds to a request with a status code that indicates an error. * Holds the response that was returned. */ -public class ResponseException extends IOException { +public final class ResponseException extends IOException { private Response response; diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index 6b4e7f554fb..ab12fcb5816 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -172,9 +172,34 @@ public final class RestClient implements Closeable { */ public Response performRequest(String method, String endpoint, Map params, HttpEntity entity, Header... 
headers) throws Exception { - HttpAsyncResponseConsumer consumer = HttpAsyncMethods.createConsumer(); + HttpAsyncResponseConsumer responseConsumer = new HeapBufferedAsyncResponseConsumer(); + return performRequest(method, endpoint, params, entity, responseConsumer, headers); + } + + /** + * Sends a request to the elasticsearch cluster that the current client points to. Blocks until the request is completed and returns + * its response of fails by throwing an exception. Selects a host out of the provided ones in a round-robin fashion. Failing hosts + * are marked dead and retried after a certain amount of time (minimum 1 minute, maximum 30 minutes), depending on how many times + * they previously failed (the more failures, the later they will be retried). In case of failures all of the alive nodes (or dead + * nodes that deserve a retry) are retried until one responds or none of them does, in which case an {@link IOException} will be thrown. + * + * @param method the http method + * @param endpoint the path of the request (without host and port) + * @param params the query_string parameters + * @param entity the body of the request, null if not applicable + * @param responseConsumer the {@link HttpAsyncResponseConsumer} callback. Controls how the response + * body gets streamed from a non-blocking HTTP connection on the client side. + * @param headers the optional request headers + * @return the response returned by elasticsearch + * @throws IOException in case of a problem or the connection was aborted + * @throws ClientProtocolException in case of an http protocol error + * @throws ResponseException in case elasticsearch responded with a status code that indicated an error + */ + public Response performRequest(String method, String endpoint, Map params, + HttpEntity entity, HttpAsyncResponseConsumer responseConsumer, + Header... 
headers) throws Exception { SyncResponseListener listener = new SyncResponseListener(); - performRequest(method, endpoint, params, entity, consumer, listener, headers); + performRequest(method, endpoint, params, entity, responseConsumer, listener, headers); return listener.get(); } @@ -225,7 +250,7 @@ public final class RestClient implements Closeable { */ public void performRequest(String method, String endpoint, Map params, HttpEntity entity, ResponseListener responseListener, Header... headers) { - HttpAsyncResponseConsumer responseConsumer = HttpAsyncMethods.createConsumer(); + HttpAsyncResponseConsumer responseConsumer = new HeapBufferedAsyncResponseConsumer(); performRequest(method, endpoint, params, entity, responseConsumer, responseListener, headers); } @@ -241,7 +266,8 @@ public final class RestClient implements Closeable { * @param endpoint the path of the request (without host and port) * @param params the query_string parameters * @param entity the body of the request, null if not applicable - * @param responseConsumer the {@link HttpAsyncResponseConsumer} callback + * @param responseConsumer the {@link HttpAsyncResponseConsumer} callback. Controls how the response + * body gets streamed from a non-blocking HTTP connection on the client side. * @param responseListener the {@link ResponseListener} to notify when the request is completed or fails * @param headers the optional request headers */ diff --git a/client/rest/src/test/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumerTests.java b/client/rest/src/test/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumerTests.java new file mode 100644 index 00000000000..1d209cd33b3 --- /dev/null +++ b/client/rest/src/test/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumerTests.java @@ -0,0 +1,104 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.apache.http.ContentTooLongException; +import org.apache.http.HttpResponse; +import org.apache.http.ProtocolVersion; +import org.apache.http.StatusLine; +import org.apache.http.entity.BasicHttpEntity; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.message.BasicHttpResponse; +import org.apache.http.message.BasicStatusLine; +import org.apache.http.nio.ContentDecoder; +import org.apache.http.nio.IOControl; +import org.apache.http.protocol.HttpContext; + +import static org.elasticsearch.client.HeapBufferedAsyncResponseConsumer.DEFAULT_BUFFER_LIMIT; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +public class HeapBufferedAsyncResponseConsumerTests extends RestClientTestCase { + + //maximum buffer that this test ends up allocating is 50MB + private static final int MAX_TEST_BUFFER_SIZE = 50 * 1024 * 1024; + + public void testResponseProcessing() throws Exception { + ContentDecoder contentDecoder = 
mock(ContentDecoder.class); + IOControl ioControl = mock(IOControl.class); + HttpContext httpContext = mock(HttpContext.class); + + HeapBufferedAsyncResponseConsumer consumer = spy(new HeapBufferedAsyncResponseConsumer()); + + ProtocolVersion protocolVersion = new ProtocolVersion("HTTP", 1, 1); + StatusLine statusLine = new BasicStatusLine(protocolVersion, 200, "OK"); + HttpResponse httpResponse = new BasicHttpResponse(statusLine); + httpResponse.setEntity(new StringEntity("test")); + + //everything goes well + consumer.responseReceived(httpResponse); + consumer.consumeContent(contentDecoder, ioControl); + consumer.responseCompleted(httpContext); + + verify(consumer).releaseResources(); + verify(consumer).buildResult(httpContext); + assertTrue(consumer.isDone()); + assertSame(httpResponse, consumer.getResult()); + + consumer.responseCompleted(httpContext); + verify(consumer, times(1)).releaseResources(); + verify(consumer, times(1)).buildResult(httpContext); + } + + public void testDefaultBufferLimit() throws Exception { + HeapBufferedAsyncResponseConsumer consumer = new HeapBufferedAsyncResponseConsumer(); + bufferLimitTest(consumer, DEFAULT_BUFFER_LIMIT); + } + + public void testConfiguredBufferLimit() throws Exception { + int bufferLimit = randomIntBetween(1, MAX_TEST_BUFFER_SIZE - 100); + HeapBufferedAsyncResponseConsumer consumer = new HeapBufferedAsyncResponseConsumer(bufferLimit); + bufferLimitTest(consumer, bufferLimit); + } + + private static void bufferLimitTest(HeapBufferedAsyncResponseConsumer consumer, int bufferLimit) throws Exception { + ProtocolVersion protocolVersion = new ProtocolVersion("HTTP", 1, 1); + StatusLine statusLine = new BasicStatusLine(protocolVersion, 200, "OK"); + consumer.onResponseReceived(new BasicHttpResponse(statusLine)); + + BasicHttpEntity entity = new BasicHttpEntity(); + entity.setContentLength(randomInt(bufferLimit)); + consumer.onEntityEnclosed(entity, ContentType.APPLICATION_JSON); + + 
entity.setContentLength(randomIntBetween(bufferLimit + 1, MAX_TEST_BUFFER_SIZE)); + try { + consumer.onEntityEnclosed(entity, ContentType.APPLICATION_JSON); + } catch(ContentTooLongException e) { + assertEquals("entity content is too long [" + entity.getContentLength() + + "] for the configured buffer limit [" + bufferLimit + "]", e.getMessage()); + } + } +} From a9b5c5adbe5ad2ba8acde1ddbc7349fa58bf9275 Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 19 Jul 2016 15:09:52 +0200 Subject: [PATCH 19/93] restore throws IOException clause on all performRequest sync methods We throw IOException, which is the exception that is going to be thrown in 99% of the cases. A more generic exception can happen, and if it is a runtime one we just let it bubble up as is, otherwise we wrap it into runtime one so that we don't require to catch Exception everywhere, which seems odd. Also adjusted javadocs for all performRequest methods --- .../org/elasticsearch/client/RestClient.java | 66 +++++++++++-------- .../client/RestClientIntegTests.java | 8 +-- .../client/RestClientMultipleHostsTests.java | 6 +- .../client/RestClientSingleHostTests.java | 16 ++--- .../client/sniff/HostsSniffer.java | 2 +- .../client/sniff/HostsSnifferTests.java | 2 +- .../http/ContextAndHeaderTransportIT.java | 3 +- .../org/elasticsearch/http/CorsNotSetIT.java | 6 +- .../org/elasticsearch/http/CorsRegexIT.java | 14 ++-- .../elasticsearch/http/DeprecationHttpIT.java | 4 +- .../http/DetailedErrorsDisabledIT.java | 3 +- .../http/DetailedErrorsEnabledIT.java | 3 +- .../elasticsearch/http/HttpCompressionIT.java | 5 +- .../http/ResponseHeaderPluginIT.java | 3 +- .../test/rest/ESRestTestCase.java | 6 +- .../test/rest/RestTestExecutionContext.java | 4 +- .../test/rest/client/RestTestClient.java | 6 +- .../client/RestTestResponseException.java | 2 +- .../test/rest/section/DoSection.java | 3 +- .../test/rest/section/ExecutableSection.java | 4 +- 20 files changed, 94 insertions(+), 72 deletions(-) diff --git 
a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index ab12fcb5816..0ca4a4e94ce 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -121,8 +121,9 @@ public final class RestClient implements Closeable { } /** - * Sends a request to the elasticsearch cluster that the current client points to. - * Shortcut to {@link #performRequest(String, String, Map, HttpEntity, Header...)} but without parameters and request body. + * Sends a request to the elasticsearch cluster that the current client points to and waits for the corresponding response + * to be returned. Shortcut to {@link #performRequest(String, String, Map, HttpEntity, Header...)} but without parameters + * and request body. * * @param method the http method * @param endpoint the path of the request (without host and port) @@ -132,13 +133,13 @@ public final class RestClient implements Closeable { * @throws ClientProtocolException in case of an http protocol error * @throws ResponseException in case elasticsearch responded with a status code that indicated an error */ - public Response performRequest(String method, String endpoint, Header... headers) throws Exception { + public Response performRequest(String method, String endpoint, Header... headers) throws IOException { return performRequest(method, endpoint, Collections.emptyMap(), (HttpEntity)null, headers); } /** - * Sends a request to the elasticsearch cluster that the current client points to. - * Shortcut to {@link #performRequest(String, String, Map, HttpEntity, Header...)} but without request body. + * Sends a request to the elasticsearch cluster that the current client points to and waits for the corresponding response + * to be returned. Shortcut to {@link #performRequest(String, String, Map, HttpEntity, Header...)} but without request body. 
* * @param method the http method * @param endpoint the path of the request (without host and port) @@ -149,16 +150,15 @@ public final class RestClient implements Closeable { * @throws ClientProtocolException in case of an http protocol error * @throws ResponseException in case elasticsearch responded with a status code that indicated an error */ - public Response performRequest(String method, String endpoint, Map params, Header... headers) throws Exception { + public Response performRequest(String method, String endpoint, Map params, Header... headers) throws IOException { return performRequest(method, endpoint, params, (HttpEntity)null, headers); } /** - * Sends a request to the elasticsearch cluster that the current client points to. Blocks until the request is completed and returns - * its response of fails by throwing an exception. Selects a host out of the provided ones in a round-robin fashion. Failing hosts - * are marked dead and retried after a certain amount of time (minimum 1 minute, maximum 30 minutes), depending on how many times - * they previously failed (the more failures, the later they will be retried). In case of failures all of the alive nodes (or dead - * nodes that deserve a retry) are retried until one responds or none of them does, in which case an {@link IOException} will be thrown. + * Sends a request to the elasticsearch cluster that the current client points to and waits for the corresponding response + * to be returned. Shortcut to {@link #performRequest(String, String, Map, HttpEntity, HttpAsyncResponseConsumer, Header...)} + * which doesn't require specifying an {@link HttpAsyncResponseConsumer} instance, {@link HeapBufferedAsyncResponseConsumer} + * will be used to consume the response body. 
* * @param method the http method * @param endpoint the path of the request (without host and port) @@ -171,7 +171,7 @@ public final class RestClient implements Closeable { * @throws ResponseException in case elasticsearch responded with a status code that indicated an error */ public Response performRequest(String method, String endpoint, Map params, - HttpEntity entity, Header... headers) throws Exception { + HttpEntity entity, Header... headers) throws IOException { HttpAsyncResponseConsumer responseConsumer = new HeapBufferedAsyncResponseConsumer(); return performRequest(method, endpoint, params, entity, responseConsumer, headers); } @@ -197,17 +197,16 @@ public final class RestClient implements Closeable { */ public Response performRequest(String method, String endpoint, Map params, HttpEntity entity, HttpAsyncResponseConsumer responseConsumer, - Header... headers) throws Exception { + Header... headers) throws IOException { SyncResponseListener listener = new SyncResponseListener(); performRequest(method, endpoint, params, entity, responseConsumer, listener, headers); return listener.get(); } /** - * Sends a request to the elasticsearch cluster that the current client points to. - * Shortcut to {@link #performRequest(String, String, Map, HttpEntity, HttpAsyncResponseConsumer, ResponseListener, Header...)} - * but without parameters, request body and async response consumer. A default response consumer, specifically an instance of - * ({@link org.apache.http.nio.protocol.BasicAsyncResponseConsumer} will be created and used. + * Sends a request to the elasticsearch cluster that the current client points to. Doesn't wait for the response, instead + * the provided {@link ResponseListener} will be notified upon completion or failure. Shortcut to + * {@link #performRequest(String, String, Map, HttpEntity, ResponseListener, Header...)} but without parameters and request body. 
* * @param method the http method * @param endpoint the path of the request (without host and port) @@ -219,10 +218,9 @@ public final class RestClient implements Closeable { } /** - * Sends a request to the elasticsearch cluster that the current client points to. - * Shortcut to {@link #performRequest(String, String, Map, HttpEntity, HttpAsyncResponseConsumer, ResponseListener, Header...)} - * but without request body and async response consumer. A default response consumer, specifically an instance of - * ({@link org.apache.http.nio.protocol.BasicAsyncResponseConsumer} will be created and used. + * Sends a request to the elasticsearch cluster that the current client points to. Doesn't wait for the response, instead + * the provided {@link ResponseListener} will be notified upon completion or failure. Shortcut to + * {@link #performRequest(String, String, Map, HttpEntity, ResponseListener, Header...)} but without request body. * * @param method the http method * @param endpoint the path of the request (without host and port) @@ -236,10 +234,11 @@ public final class RestClient implements Closeable { } /** - * Sends a request to the elasticsearch cluster that the current client points to. + * Sends a request to the elasticsearch cluster that the current client points to. Doesn't wait for the response, instead + * the provided {@link ResponseListener} will be notified upon completion or failure. * Shortcut to {@link #performRequest(String, String, Map, HttpEntity, HttpAsyncResponseConsumer, ResponseListener, Header...)} - * but without an async response consumer, meaning that a {@link org.apache.http.nio.protocol.BasicAsyncResponseConsumer} - * will be created and used. + * which doesn't require specifying an {@link HttpAsyncResponseConsumer} instance, {@link HeapBufferedAsyncResponseConsumer} + * will be used to consume the response body. 
* * @param method the http method * @param endpoint the path of the request (without host and port) @@ -256,7 +255,7 @@ public final class RestClient implements Closeable { /** * Sends a request to the elasticsearch cluster that the current client points to. The request is executed asynchronously - * and the provided {@link ResponseListener} gets notified whenever it is completed or it fails. + * and the provided {@link ResponseListener} gets notified upon request completion or failure. * Selects a host out of the provided ones in a round-robin fashion. Failing hosts are marked dead and retried after a certain * amount of time (minimum 1 minute, maximum 30 minutes), depending on how many times they previously failed (the more failures, * the later they will be retried). In case of failures all of the alive nodes (or dead nodes that deserve a retry) are retried @@ -541,14 +540,25 @@ public final class RestClient implements Closeable { latch.countDown(); } - Response get() throws Exception { - latch.await(); + Response get() throws IOException { + try { + latch.await(); + } catch (InterruptedException e) { + throw new RuntimeException("thread waiting for the response was interrupted", e); + } if (response != null) { assert exception == null; return response; } assert exception != null; - throw exception; + //try and leave the exception untouched as much as possible but we don't want to just add throws Exception clause everywhere + if (exception instanceof IOException) { + throw (IOException) exception; + } + if (exception instanceof RuntimeException){ + throw (RuntimeException) exception; + } + throw new IOException("error while performing request", exception); } } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java index b1f9b66557b..ceaccad846d 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java +++ 
b/client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java @@ -141,7 +141,7 @@ public class RestClientIntegTests extends RestClientTestCase { * to set/add headers to the {@link org.apache.http.client.HttpClient}. * Exercises the test http server ability to send back whatever headers it received. */ - public void testHeaders() throws Exception { + public void testHeaders() throws IOException { for (String method : getHttpMethods()) { Set standardHeaders = new HashSet<>( Arrays.asList("Connection", "Host", "User-agent", "Date")); @@ -189,7 +189,7 @@ public class RestClientIntegTests extends RestClientTestCase { * out of the box by {@link org.apache.http.client.HttpClient}. * Exercises the test http server ability to send back whatever body it received. */ - public void testDeleteWithBody() throws Exception { + public void testDeleteWithBody() throws IOException { bodyTest("DELETE"); } @@ -198,11 +198,11 @@ public class RestClientIntegTests extends RestClientTestCase { * out of the box by {@link org.apache.http.client.HttpClient}. * Exercises the test http server ability to send back whatever body it received. 
*/ - public void testGetWithBody() throws Exception { + public void testGetWithBody() throws IOException { bodyTest("GET"); } - private void bodyTest(String method) throws Exception { + private void bodyTest(String method) throws IOException { String requestBody = "{ \"field\": \"value\" }"; StringEntity entity = new StringEntity(requestBody); int statusCode = randomStatusCode(getRandom()); diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java index 5fdf331a01e..2a2d279689a 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java @@ -104,7 +104,7 @@ public class RestClientMultipleHostsTests extends RestClientTestCase { restClient = new RestClient(httpClient, 10000, new Header[0], httpHosts, failureListener); } - public void testRoundRobinOkStatusCodes() throws Exception { + public void testRoundRobinOkStatusCodes() throws IOException { int numIters = RandomInts.randomIntBetween(getRandom(), 1, 5); for (int i = 0; i < numIters; i++) { Set hostsSet = new HashSet<>(); @@ -120,7 +120,7 @@ public class RestClientMultipleHostsTests extends RestClientTestCase { failureListener.assertNotCalled(); } - public void testRoundRobinNoRetryErrors() throws Exception { + public void testRoundRobinNoRetryErrors() throws IOException { int numIters = RandomInts.randomIntBetween(getRandom(), 1, 5); for (int i = 0; i < numIters; i++) { Set hostsSet = new HashSet<>(); @@ -153,7 +153,7 @@ public class RestClientMultipleHostsTests extends RestClientTestCase { failureListener.assertNotCalled(); } - public void testRoundRobinRetryErrors() throws Exception { + public void testRoundRobinRetryErrors() throws IOException { String retryEndpoint = randomErrorRetryEndpoint(); try { restClient.performRequest(randomHttpMethod(getRandom()), 
retryEndpoint); diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java index 62f8eb548f6..e347dfecc12 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java @@ -199,7 +199,7 @@ public class RestClientSingleHostTests extends RestClientTestCase { /** * End to end test for ok status codes */ - public void testOkStatusCodes() throws Exception { + public void testOkStatusCodes() throws IOException { for (String method : getHttpMethods()) { for (int okStatusCode : getOkStatusCodes()) { Response response = performRequest(method, "/" + okStatusCode); @@ -212,7 +212,7 @@ public class RestClientSingleHostTests extends RestClientTestCase { /** * End to end test for error status codes: they should cause an exception to be thrown, apart from 404 with HEAD requests */ - public void testErrorStatusCodes() throws Exception { + public void testErrorStatusCodes() throws IOException { for (String method : getHttpMethods()) { //error status codes should cause an exception to be thrown for (int errorStatusCode : getAllErrorStatusCodes()) { @@ -239,7 +239,7 @@ public class RestClientSingleHostTests extends RestClientTestCase { } } - public void testIOExceptions() throws Exception { + public void testIOExceptions() throws IOException { for (String method : getHttpMethods()) { //IOExceptions should be let bubble up try { @@ -263,7 +263,7 @@ public class RestClientSingleHostTests extends RestClientTestCase { * End to end test for request and response body. Exercises the mock http client ability to send back * whatever body it has received. 
*/ - public void testBody() throws Exception { + public void testBody() throws IOException { String body = "{ \"field\": \"value\" }"; StringEntity entity = new StringEntity(body); for (String method : Arrays.asList("DELETE", "GET", "PATCH", "POST", "PUT")) { @@ -293,7 +293,7 @@ public class RestClientSingleHostTests extends RestClientTestCase { } } - public void testNullHeaders() throws Exception { + public void testNullHeaders() throws IOException { String method = randomHttpMethod(getRandom()); int statusCode = randomStatusCode(getRandom()); try { @@ -310,7 +310,7 @@ public class RestClientSingleHostTests extends RestClientTestCase { } } - public void testNullParams() throws Exception { + public void testNullParams() throws IOException { String method = randomHttpMethod(getRandom()); int statusCode = randomStatusCode(getRandom()); try { @@ -331,7 +331,7 @@ public class RestClientSingleHostTests extends RestClientTestCase { * End to end test for request and response headers. Exercises the mock http client ability to send back * whatever headers it has received. */ - public void testHeaders() throws Exception { + public void testHeaders() throws IOException { for (String method : getHttpMethods()) { Map expectedHeaders = new HashMap<>(); for (Header defaultHeader : defaultHeaders) { @@ -447,7 +447,7 @@ public class RestClientSingleHostTests extends RestClientTestCase { return request; } - private Response performRequest(String method, String endpoint, Header... headers) throws Exception { + private Response performRequest(String method, String endpoint, Header... 
headers) throws IOException { switch(randomIntBetween(0, 2)) { case 0: return restClient.performRequest(method, endpoint, headers); diff --git a/client/sniffer/src/main/java/org/elasticsearch/client/sniff/HostsSniffer.java b/client/sniffer/src/main/java/org/elasticsearch/client/sniff/HostsSniffer.java index 4aae68caef0..d5b2c67ee02 100644 --- a/client/sniffer/src/main/java/org/elasticsearch/client/sniff/HostsSniffer.java +++ b/client/sniffer/src/main/java/org/elasticsearch/client/sniff/HostsSniffer.java @@ -61,7 +61,7 @@ public class HostsSniffer { /** * Calls the elasticsearch nodes info api, parses the response and returns all the found http hosts */ - public List sniffHosts() throws Exception { + public List sniffHosts() throws IOException { Response response = restClient.performRequest("get", "/_nodes/http", sniffRequestParams); return readHosts(response.getEntity()); } diff --git a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/HostsSnifferTests.java b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/HostsSnifferTests.java index 5a9fd4033d1..bb375834097 100644 --- a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/HostsSnifferTests.java +++ b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/HostsSnifferTests.java @@ -85,7 +85,7 @@ public class HostsSnifferTests extends RestClientTestCase { httpServer.stop(0); } - public void testSniffNodes() throws Exception { + public void testSniffNodes() throws IOException { HttpHost httpHost = new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort()); try (RestClient restClient = RestClient.builder(httpHost).build()) { HostsSniffer.Builder builder = HostsSniffer.builder(restClient).setSniffRequestTimeoutMillis(sniffRequestTimeout); diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java index 
f39abb445d8..1249f1deadf 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java @@ -51,6 +51,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.Before; +import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -218,7 +219,7 @@ public class ContextAndHeaderTransportIT extends HttpSmokeTestCase { assertRequestsContainHeader(MultiTermVectorsRequest.class); } - public void testThatRelevantHttpHeadersBecomeRequestHeaders() throws Exception { + public void testThatRelevantHttpHeadersBecomeRequestHeaders() throws IOException { final String IRRELEVANT_HEADER = "SomeIrrelevantHeader"; Response response = getRestClient().performRequest("GET", "/" + queryIndex + "/_search", new BasicHeader(CUSTOM_HEADER, randomHeaderValue), new BasicHeader(IRRELEVANT_HEADER, randomHeaderValue)); diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsNotSetIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsNotSetIT.java index 576d5c0db96..bdda44c1b71 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsNotSetIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsNotSetIT.java @@ -23,12 +23,14 @@ import org.apache.http.message.BasicHeader; import org.elasticsearch.client.Response; import org.elasticsearch.test.ESIntegTestCase; +import java.io.IOException; + import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; public class CorsNotSetIT extends HttpSmokeTestCase { - public void testCorsSettingDefaultBehaviourDoesNotReturnAnything() throws Exception { + public void testCorsSettingDefaultBehaviourDoesNotReturnAnything() throws IOException { String corsValue = "http://localhost:9200"; Response response = getRestClient().performRequest("GET", "/", new 
BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", corsValue)); @@ -37,7 +39,7 @@ public class CorsNotSetIT extends HttpSmokeTestCase { assertThat(response.getHeader("Access-Control-Allow-Credentials"), nullValue()); } - public void testThatOmittingCorsHeaderDoesNotReturnAnything() throws Exception { + public void testThatOmittingCorsHeaderDoesNotReturnAnything() throws IOException { Response response = getRestClient().performRequest("GET", "/"); assertThat(response.getStatusLine().getStatusCode(), is(200)); assertThat(response.getHeader("Access-Control-Allow-Origin"), nullValue()); diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsRegexIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsRegexIT.java index 5bcef4828c9..ca5cc1d0762 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsRegexIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsRegexIT.java @@ -28,6 +28,8 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.jboss.netty.handler.codec.http.HttpHeaders; +import java.io.IOException; + import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS; import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_METHODS; import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_ORIGIN; @@ -58,7 +60,7 @@ public class CorsRegexIT extends HttpSmokeTestCase { return true; } - public void testThatRegularExpressionWorksOnMatch() throws Exception { + public void testThatRegularExpressionWorksOnMatch() throws IOException { String corsValue = "http://localhost:9200"; Response response = getRestClient().performRequest("GET", "/", new BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", corsValue)); @@ -71,7 +73,7 @@ public class CorsRegexIT extends HttpSmokeTestCase { 
assertThat(response.getHeader("Access-Control-Allow-Credentials"), is("true")); } - public void testThatRegularExpressionReturnsForbiddenOnNonMatch() throws Exception { + public void testThatRegularExpressionReturnsForbiddenOnNonMatch() throws IOException { try { getRestClient().performRequest("GET", "/", new BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", "http://evil-host:9200")); @@ -84,19 +86,19 @@ public class CorsRegexIT extends HttpSmokeTestCase { } } - public void testThatSendingNoOriginHeaderReturnsNoAccessControlHeader() throws Exception { + public void testThatSendingNoOriginHeaderReturnsNoAccessControlHeader() throws IOException { Response response = getRestClient().performRequest("GET", "/", new BasicHeader("User-Agent", "Mozilla Bar")); assertThat(response.getStatusLine().getStatusCode(), is(200)); assertThat(response.getHeader("Access-Control-Allow-Origin"), nullValue()); } - public void testThatRegularExpressionIsNotAppliedWithoutCorrectBrowserOnMatch() throws Exception { + public void testThatRegularExpressionIsNotAppliedWithoutCorrectBrowserOnMatch() throws IOException { Response response = getRestClient().performRequest("GET", "/"); assertThat(response.getStatusLine().getStatusCode(), is(200)); assertThat(response.getHeader("Access-Control-Allow-Origin"), nullValue()); } - public void testThatPreFlightRequestWorksOnMatch() throws Exception { + public void testThatPreFlightRequestWorksOnMatch() throws IOException { String corsValue = "http://localhost:9200"; Response response = getRestClient().performRequest("OPTIONS", "/", new BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", corsValue), @@ -105,7 +107,7 @@ public class CorsRegexIT extends HttpSmokeTestCase { assertNotNull(response.getHeader("Access-Control-Allow-Methods")); } - public void testThatPreFlightRequestReturnsNullOnNonMatch() throws Exception { + public void testThatPreFlightRequestReturnsNullOnNonMatch() throws IOException { try { 
getRestClient().performRequest("OPTIONS", "/", new BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", "http://evil-host:9200"), diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java index 40c757a3b1f..778daa7714e 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java @@ -77,7 +77,7 @@ public class DeprecationHttpIT extends HttpSmokeTestCase { * Attempts to do a scatter/gather request that expects unique responses per sub-request. */ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/19222") - public void testUniqueDeprecationResponsesMergedTogether() throws Exception { + public void testUniqueDeprecationResponsesMergedTogether() throws IOException { final String[] indices = new String[randomIntBetween(2, 5)]; // add at least one document for each index @@ -135,7 +135,7 @@ public class DeprecationHttpIT extends HttpSmokeTestCase { *

* Re-running this back-to-back helps to ensure that warnings are not being maintained across requests. */ - private void doTestDeprecationWarningsAppearInHeaders() throws Exception { + private void doTestDeprecationWarningsAppearInHeaders() throws IOException { final boolean useDeprecatedField = randomBoolean(); final boolean useNonDeprecatedSetting = randomBoolean(); diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsDisabledIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsDisabledIT.java index 5e53599b934..0fd4d6e6edd 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsDisabledIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsDisabledIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; +import java.io.IOException; import java.util.Collections; import static org.hamcrest.Matchers.is; @@ -52,7 +53,7 @@ public class DetailedErrorsDisabledIT extends HttpSmokeTestCase { return true; } - public void testThatErrorTraceParamReturns400() throws Exception { + public void testThatErrorTraceParamReturns400() throws IOException { try { getRestClient().performRequest("DELETE", "/", Collections.singletonMap("error_trace", "true")); fail("request should have failed"); diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsEnabledIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsEnabledIT.java index fb26e59a1a5..cd3cf1a38d8 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsEnabledIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsEnabledIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.test.ESIntegTestCase; 
+import java.io.IOException; import java.util.Collections; import static org.hamcrest.Matchers.containsString; @@ -34,7 +35,7 @@ import static org.hamcrest.Matchers.not; */ public class DetailedErrorsEnabledIT extends HttpSmokeTestCase { - public void testThatErrorTraceWorksByDefault() throws Exception { + public void testThatErrorTraceWorksByDefault() throws IOException { try { getRestClient().performRequest("DELETE", "/", Collections.singletonMap("error_trace", "true")); fail("request should have failed"); diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java index a6d9f833d22..e22b2e187ea 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.test.ESIntegTestCase; +import java.io.IOException; import java.util.Collections; public class HttpCompressionIT extends ESIntegTestCase { @@ -43,7 +44,7 @@ public class HttpCompressionIT extends ESIntegTestCase { return false; } - public void testCompressesResponseIfRequested() throws Exception { + public void testCompressesResponseIfRequested() throws IOException { ensureGreen(); try (RestClient client = getRestClient()) { Response response = client.performRequest("GET", "/", new BasicHeader(HttpHeaders.ACCEPT_ENCODING, GZIP_ENCODING)); @@ -52,7 +53,7 @@ public class HttpCompressionIT extends ESIntegTestCase { } } - public void testUncompressedResponseByDefault() throws Exception { + public void testUncompressedResponseByDefault() throws IOException { ensureGreen(); try (RestClient client = getRestClient()) { Response response = client.performRequest("GET", "/"); diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ResponseHeaderPluginIT.java 
b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ResponseHeaderPluginIT.java index 037549ada06..1e8b25b288d 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ResponseHeaderPluginIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ResponseHeaderPluginIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import java.util.ArrayList; +import java.io.IOException; import java.util.Collection; import static org.hamcrest.Matchers.equalTo; @@ -56,7 +57,7 @@ public class ResponseHeaderPluginIT extends HttpSmokeTestCase { return plugins; } - public void testThatSettingHeadersWorks() throws Exception { + public void testThatSettingHeadersWorks() throws IOException { ensureGreen(); try { getRestClient().performRequest("GET", "/_protected"); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index cbea7edd5af..16606e9fa91 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -270,7 +270,7 @@ public abstract class ESRestTestCase extends ESTestCase { } @After - public void wipeCluster() throws Exception { + public void wipeCluster() throws IOException { // wipe indices Map deleteIndicesArgs = new HashMap<>(); deleteIndicesArgs.put("index", "*"); @@ -299,7 +299,7 @@ public abstract class ESRestTestCase extends ESTestCase { * other tests. 
*/ @After - public void logIfThereAreRunningTasks() throws Exception { + public void logIfThereAreRunningTasks() throws IOException { RestTestResponse tasks = adminExecutionContext.callApi("tasks.list", emptyMap(), emptyList(), emptyMap()); Set runningTasks = runningTasks(tasks); // Ignore the task list API - it doens't count against us @@ -378,7 +378,7 @@ public abstract class ESRestTestCase extends ESTestCase { return messageBuilder.toString(); } - public void test() throws Exception { + public void test() throws IOException { //let's check that there is something to run, otherwise there might be a problem with the test section if (testCandidate.getTestSection().getExecutableSections().size() == 0) { throw new IllegalArgumentException("No executable sections loaded for [" + testCandidate.getTestPath() + "]"); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java index 3be5a43e151..7ed46be4d8a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java @@ -62,7 +62,7 @@ public class RestTestExecutionContext implements Closeable { * Saves the obtained response in the execution context. 
*/ public RestTestResponse callApi(String apiName, Map params, List> bodies, - Map headers) throws Exception { + Map headers) throws IOException { //makes a copy of the parameters before modifying them for this specific request HashMap requestParams = new HashMap<>(params); for (Map.Entry entry : requestParams.entrySet()) { @@ -105,7 +105,7 @@ public class RestTestExecutionContext implements Closeable { } private RestTestResponse callApiInternal(String apiName, Map params, String body, Map headers) - throws Exception { + throws IOException { return restTestClient.callApi(apiName, params, body, headers); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java index dfa59509790..56bca24532c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java @@ -84,7 +84,7 @@ public class RestTestClient implements Closeable { private final RestClient restClient; private final Version esVersion; - public RestTestClient(RestSpec restSpec, Settings settings, URL[] urls) throws Exception { + public RestTestClient(RestSpec restSpec, Settings settings, URL[] urls) throws IOException { assert urls.length > 0; this.restSpec = restSpec; this.restClient = createRestClient(urls, settings); @@ -92,7 +92,7 @@ public class RestTestClient implements Closeable { logger.info("REST client initialized {}, elasticsearch version: [{}]", urls, esVersion); } - private Version readAndCheckVersion(URL[] urls) throws Exception { + private Version readAndCheckVersion(URL[] urls) throws IOException { RestApi restApi = restApi("info"); assert restApi.getPaths().size() == 1; assert restApi.getMethods().size() == 1; @@ -127,7 +127,7 @@ public class RestTestClient implements Closeable { * Calls an api with the provided parameters and body */ public 
RestTestResponse callApi(String apiName, Map params, String body, Map headers) - throws Exception { + throws IOException { if ("raw".equals(apiName)) { // Raw requests are bit simpler.... diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponseException.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponseException.java index 0a1a086b4e5..2fc93a91088 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponseException.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponseException.java @@ -27,7 +27,7 @@ import java.io.IOException; * Exception obtained from a REST call in case the response code indicated an error. Eagerly reads the response body into a string * for later optional parsing. Supports parsing the response body when needed and returning specific values extracted from it. */ -public class RestTestResponseException extends Exception { +public class RestTestResponseException extends IOException { private final RestTestResponse restTestResponse; private final ResponseException responseException; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java index 1f60db33378..20b4f66f06c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java @@ -26,6 +26,7 @@ import org.elasticsearch.test.rest.RestTestExecutionContext; import org.elasticsearch.test.rest.client.RestTestResponse; import org.elasticsearch.test.rest.client.RestTestResponseException; +import java.io.IOException; import java.util.HashMap; import java.util.Map; @@ -78,7 +79,7 @@ public class DoSection implements ExecutableSection { } @Override - public void execute(RestTestExecutionContext executionContext) throws Exception { + 
public void execute(RestTestExecutionContext executionContext) throws IOException { if ("param".equals(catchParam)) { //client should throw validation error before sending request diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java index ece972b77f9..669d82cdd78 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java @@ -20,6 +20,8 @@ package org.elasticsearch.test.rest.section; import org.elasticsearch.test.rest.RestTestExecutionContext; +import java.io.IOException; + /** * Represents a test fragment that can be executed (e.g. api call, assertion) */ @@ -28,5 +30,5 @@ public interface ExecutableSection { /** * Executes the section passing in the execution context */ - void execute(RestTestExecutionContext executionContext) throws Exception; + void execute(RestTestExecutionContext executionContext) throws IOException; } From 569d7b3ecc416d90b2dcf4d321f1c8232c95e19c Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 19 Jul 2016 17:32:12 +0200 Subject: [PATCH 20/93] notify the listener if the request gets cancelled --- .../rest/src/main/java/org/elasticsearch/client/RestClient.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index 0ca4a4e94ce..d8d45b3a3fd 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -58,6 +58,7 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; import 
java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; @@ -346,6 +347,7 @@ public final class RestClient implements Closeable { @Override public void cancelled() { + listener.onDefinitiveFailure(new ExecutionException("request was cancelled", null)); } }); } From fccfe7dcb8899361a46931a8c99dad861f2fb895 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 20 Jul 2016 16:24:29 +0200 Subject: [PATCH 21/93] RestClient javadocs adjustments --- .../org/elasticsearch/client/RestClient.java | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index d8d45b3a3fd..95f6bc136d2 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -122,7 +122,7 @@ public final class RestClient implements Closeable { } /** - * Sends a request to the elasticsearch cluster that the current client points to and waits for the corresponding response + * Sends a request to the elasticsearch cluster that the client points to and waits for the corresponding response * to be returned. Shortcut to {@link #performRequest(String, String, Map, HttpEntity, Header...)} but without parameters * and request body. * @@ -139,7 +139,7 @@ public final class RestClient implements Closeable { } /** - * Sends a request to the elasticsearch cluster that the current client points to and waits for the corresponding response + * Sends a request to the elasticsearch cluster that the client points to and waits for the corresponding response * to be returned. Shortcut to {@link #performRequest(String, String, Map, HttpEntity, Header...)} but without request body. 
* * @param method the http method @@ -156,7 +156,7 @@ public final class RestClient implements Closeable { } /** - * Sends a request to the elasticsearch cluster that the current client points to and waits for the corresponding response + * Sends a request to the elasticsearch cluster that the client points to and waits for the corresponding response * to be returned. Shortcut to {@link #performRequest(String, String, Map, HttpEntity, HttpAsyncResponseConsumer, Header...)} * which doesn't require specifying an {@link HttpAsyncResponseConsumer} instance, {@link HeapBufferedAsyncResponseConsumer} * will be used to consume the response body. @@ -178,8 +178,8 @@ public final class RestClient implements Closeable { } /** - * Sends a request to the elasticsearch cluster that the current client points to. Blocks until the request is completed and returns - * its response of fails by throwing an exception. Selects a host out of the provided ones in a round-robin fashion. Failing hosts + * Sends a request to the elasticsearch cluster that the client points to. Blocks until the request is completed and returns + * its response or fails by throwing an exception. Selects a host out of the provided ones in a round-robin fashion. Failing hosts * are marked dead and retried after a certain amount of time (minimum 1 minute, maximum 30 minutes), depending on how many times * they previously failed (the more failures, the later they will be retried). In case of failures all of the alive nodes (or dead * nodes that deserve a retry) are retried until one responds or none of them does, in which case an {@link IOException} will be thrown. @@ -205,7 +205,7 @@ public final class RestClient implements Closeable { } /** - * Sends a request to the elasticsearch cluster that the current client points to. Doesn't wait for the response, instead + * Sends a request to the elasticsearch cluster that the client points to. 
Doesn't wait for the response, instead * the provided {@link ResponseListener} will be notified upon completion or failure. Shortcut to * {@link #performRequest(String, String, Map, HttpEntity, ResponseListener, Header...)} but without parameters and request body. * @@ -219,7 +219,7 @@ public final class RestClient implements Closeable { } /** - * Sends a request to the elasticsearch cluster that the current client points to. Doesn't wait for the response, instead + * Sends a request to the elasticsearch cluster that the client points to. Doesn't wait for the response, instead * the provided {@link ResponseListener} will be notified upon completion or failure. Shortcut to * {@link #performRequest(String, String, Map, HttpEntity, ResponseListener, Header...)} but without request body. * @@ -235,7 +235,7 @@ public final class RestClient implements Closeable { } /** - * Sends a request to the elasticsearch cluster that the current client points to. Doesn't wait for the response, instead + * Sends a request to the elasticsearch cluster that the client points to. Doesn't wait for the response, instead * the provided {@link ResponseListener} will be notified upon completion or failure. * Shortcut to {@link #performRequest(String, String, Map, HttpEntity, HttpAsyncResponseConsumer, ResponseListener, Header...)} * which doesn't require specifying an {@link HttpAsyncResponseConsumer} instance, {@link HeapBufferedAsyncResponseConsumer} @@ -255,7 +255,7 @@ public final class RestClient implements Closeable { } /** - * Sends a request to the elasticsearch cluster that the current client points to. The request is executed asynchronously + * Sends a request to the elasticsearch cluster that the client points to. The request is executed asynchronously * and the provided {@link ResponseListener} gets notified upon request completion or failure. * Selects a host out of the provided ones in a round-robin fashion. 
Failing hosts are marked dead and retried after a certain * amount of time (minimum 1 minute, maximum 30 minutes), depending on how many times they previously failed (the more failures, From 59ccc88c73ed944bc9bfd56bd4498601a1b2fc37 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 20 Jul 2016 16:24:57 +0200 Subject: [PATCH 22/93] rename mustRetry method to mayRetry --- .../src/main/java/org/elasticsearch/client/RestClient.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index 95f6bc136d2..32d608e385f 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -300,7 +300,7 @@ public final class RestClient implements Closeable { listener.onSuccess(response); } else { ResponseException responseException = new ResponseException(response); - if (mustRetry(statusCode)) { + if (mayRetry(statusCode)) { //mark host dead and retry against next one onFailure(host); retryIfPossible(responseException, hosts, request); @@ -437,7 +437,7 @@ public final class RestClient implements Closeable { return statusCode < 300 || (HttpHead.METHOD_NAME.equals(method) && statusCode == 404); } - private static boolean mustRetry(int statusCode) { + private static boolean mayRetry(int statusCode) { switch(statusCode) { case 502: case 503: From 175c327e17f11e4ee5bbff525d830251cfcee798 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 21 Jul 2016 13:59:28 +0200 Subject: [PATCH 23/93] validate bufferLimit is positive in HeapBufferedAsyncResponseConsumer --- .../client/HeapBufferedAsyncResponseConsumer.java | 3 +++ .../client/HeapBufferedAsyncResponseConsumerTests.java | 10 ++++++++++ 2 files changed, 13 insertions(+) diff --git a/client/rest/src/main/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumer.java 
b/client/rest/src/main/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumer.java index 61e06132212..da7f5c79721 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumer.java +++ b/client/rest/src/main/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumer.java @@ -61,6 +61,9 @@ public class HeapBufferedAsyncResponseConsumer extends AbstractAsyncResponseCons * Creates a new instance of this consumer with the provided buffer limit */ public HeapBufferedAsyncResponseConsumer(int bufferLimit) { + if (bufferLimit <= 0) { + throw new IllegalArgumentException("bufferLimit must be greater than 0"); + } this.bufferLimit = bufferLimit; } diff --git a/client/rest/src/test/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumerTests.java b/client/rest/src/test/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumerTests.java index 1d209cd33b3..d30a9e00b53 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumerTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/HeapBufferedAsyncResponseConsumerTests.java @@ -79,6 +79,16 @@ public class HeapBufferedAsyncResponseConsumerTests extends RestClientTestCase { } public void testConfiguredBufferLimit() throws Exception { + try { + new HeapBufferedAsyncResponseConsumer(randomIntBetween(Integer.MIN_VALUE, 0)); + } catch(IllegalArgumentException e) { + assertEquals("bufferLimit must be greater than 0", e.getMessage()); + } + try { + new HeapBufferedAsyncResponseConsumer(0); + } catch(IllegalArgumentException e) { + assertEquals("bufferLimit must be greater than 0", e.getMessage()); + } int bufferLimit = randomIntBetween(1, MAX_TEST_BUFFER_SIZE - 100); HeapBufferedAsyncResponseConsumer consumer = new HeapBufferedAsyncResponseConsumer(bufferLimit); bufferLimitTest(consumer, bufferLimit); From 3bcf2653bb9ad16a14c468eb9ad1a88631ebc0cc Mon Sep 17 00:00:00 2001 From: David Pilato Date: Thu, 21 Jul 2016 15:03:56 
+0200 Subject: [PATCH 24/93] ping_timeout is documented in discovery-ec2 but does not exist in code Also mentioned in discovery-gce Actually ping timeout can be set using `discovery.zen.ping_timeout`. Closes #16600. --- docs/plugins/discovery-ec2.asciidoc | 12 +++--------- docs/plugins/discovery-gce.asciidoc | 6 +++--- 2 files changed, 6 insertions(+), 12 deletions(-) diff --git a/docs/plugins/discovery-ec2.asciidoc b/docs/plugins/discovery-ec2.asciidoc index 224080c522c..7a9cd3313b2 100644 --- a/docs/plugins/discovery-ec2.asciidoc +++ b/docs/plugins/discovery-ec2.asciidoc @@ -168,12 +168,6 @@ The following are a list of settings (prefixed with `discovery.ec2`) that can fu If set to `false`, will require all security groups to be present for the instance to be used for the discovery. Defaults to `true`. -`ping_timeout`:: - - How long to wait for existing EC2 nodes to reply during discovery. - Defaults to `3s`. If no unit like `ms`, `s` or `m` is specified, - milliseconds are used. - `node_cache_time`:: How long the list of hosts is cached to prevent further requests to the AWS API. @@ -243,9 +237,9 @@ filter instances with a tag key set to `stage`, and a value of `dev`. Several ta to be set for the instance to be included. One practical use for tag filtering is when an ec2 cluster contains many nodes that are not running elasticsearch. In -this case (particularly with high `ping_timeout` values) there is a risk that a new node's discovery phase will end -before it has found the cluster (which will result in it declaring itself master of a new cluster with the same name -- highly undesirable). Tagging elasticsearch ec2 nodes and then filtering by that tag will resolve this issue. +this case (particularly with high `discovery.zen.ping_timeout` values) there is a risk that a new node's discovery phase +will end before it has found the cluster (which will result in it declaring itself master of a new cluster with the same +name - highly undesirable). 
Tagging elasticsearch ec2 nodes and then filtering by that tag will resolve this issue. [[discovery-ec2-attributes]] ===== Automatic Node Attributes diff --git a/docs/plugins/discovery-gce.asciidoc b/docs/plugins/discovery-gce.asciidoc index 38dad90d1fc..f543bf92709 100644 --- a/docs/plugins/discovery-gce.asciidoc +++ b/docs/plugins/discovery-gce.asciidoc @@ -377,9 +377,9 @@ For example, setting `discovery.gce.tags` to `dev` will only filter instances ha set will require all of those tags to be set for the instance to be included. One practical use for tag filtering is when an GCE cluster contains many nodes that are not running -elasticsearch. In this case (particularly with high ping_timeout values) there is a risk that a new node's discovery -phase will end before it has found the cluster (which will result in it declaring itself master of a new cluster -with the same name - highly undesirable). Adding tag on elasticsearch GCE nodes and then filtering by that +elasticsearch. In this case (particularly with high `discovery.zen.ping_timeout` values) there is a risk that a new +node's discovery phase will end before it has found the cluster (which will result in it declaring itself master of a +new cluster with the same name - highly undesirable). Adding tag on elasticsearch GCE nodes and then filtering by that tag will resolve this issue. Add your tag when building the new instance: From 83a137b25c1abd460cabd81f6472e0e2b2ed978a Mon Sep 17 00:00:00 2001 From: Ali Beyad Date: Thu, 21 Jul 2016 09:14:41 -0400 Subject: [PATCH 25/93] Fixes REST test that is designed to timeout on index creation by making the test wait until all urgent requests are completed before finishing, so that tear down can properly delete the created index and cleanup. Without this wait, it was possible that the test would finish and cleanup the deleted indices would happen before the index creation even processed, causing the test to leave a created index behind. 
--- .../resources/rest-api-spec/test/indices.create/10_basic.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yaml index 3c6ad7a7051..cabfbfeb8b7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yaml @@ -45,6 +45,10 @@ - match: { shards_acknowledged: false } + - do: + cluster.health: + wait_for_events: urgent + --- "Create index with wait_for_active_shards set to all": From 4d89aa97e99e082015aaf42a92cbba84c4256206 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 21 Jul 2016 10:02:37 -0400 Subject: [PATCH 26/93] Vagrant tests should use plugin zips Fixes failing packaging tests: https://elasticsearch-ci.elastic.co/job/elastic+elasticsearch+master+packaging-tests/1049/console --- qa/vagrant/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index 86523a734ad..303032299b2 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -101,7 +101,7 @@ dependencies { // Collect all the plugins for (Project subproj : project.rootProject.subprojects) { if (subproj.path.startsWith(':plugins:')) { - test project("${subproj.path}") + test project(path: "${subproj.path}", configuration: 'zip') } } From 9765b4a6ff9c368d2bfbeb7959b077fff4933011 Mon Sep 17 00:00:00 2001 From: Ali Beyad Date: Thu, 21 Jul 2016 11:47:21 -0400 Subject: [PATCH 27/93] Fixes the ActiveShardsObserverIT tests that have a very short index (#19540) creation timeout so they process the index creation cluster state update before the test finishes and attempts to cleanup. 
Otherwise, the index creation cluster state update could be processed after the test finishes and cleans up, thereby leaking an index in the cluster state that could cause issues for other tests that wouldn't expect the index to exist. Closes #19530 --- .../support/ActiveShardsObserverIT.java | 30 +++++++++++++++---- 1 file changed, 24 insertions(+), 6 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/action/support/ActiveShardsObserverIT.java b/core/src/test/java/org/elasticsearch/action/support/ActiveShardsObserverIT.java index ec3b5421e0e..1486df298ba 100644 --- a/core/src/test/java/org/elasticsearch/action/support/ActiveShardsObserverIT.java +++ b/core/src/test/java/org/elasticsearch/action/support/ActiveShardsObserverIT.java @@ -21,6 +21,7 @@ package org.elasticsearch.action.support; import org.elasticsearch.action.ListenableActionFuture; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESIntegTestCase; @@ -44,12 +45,14 @@ public class ActiveShardsObserverIT extends ESIntegTestCase { settingsBuilder.put("index.routing.allocation.exclude._name", exclude); } Settings settings = settingsBuilder.build(); - assertFalse(prepareCreate("test-idx") + final String indexName = "test-idx"; + assertFalse(prepareCreate(indexName) .setSettings(settings) .setWaitForActiveShards(randomBoolean() ? 
ActiveShardCount.from(1) : ActiveShardCount.ALL) .setTimeout("100ms") .get() .isShardsAcked()); + waitForIndexCreationToComplete(indexName); } public void testCreateIndexNoActiveShardsNoWaiting() throws Exception { @@ -74,22 +77,24 @@ public class ActiveShardsObserverIT extends ESIntegTestCase { final int numReplicas = numDataNodes + randomInt(4); Settings settings = Settings.builder() .put(indexSettings()) - .put(INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), randomIntBetween(1, 7)) + .put(INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), randomIntBetween(1, 5)) .put(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), numReplicas) .build(); - assertFalse(prepareCreate("test-idx") + final String indexName = "test-idx"; + assertFalse(prepareCreate(indexName) .setSettings(settings) .setWaitForActiveShards(ActiveShardCount.from(randomIntBetween(numDataNodes + 1, numReplicas + 1))) .setTimeout("100ms") .get() .isShardsAcked()); + waitForIndexCreationToComplete(indexName); } public void testCreateIndexEnoughActiveShards() throws Exception { final String indexName = "test-idx"; Settings settings = Settings.builder() .put(indexSettings()) - .put(INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), randomIntBetween(1, 7)) + .put(INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), randomIntBetween(1, 5)) .put(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), internalCluster().numDataNodes() + randomIntBetween(0, 3)) .build(); ActiveShardCount waitForActiveShards = ActiveShardCount.from(randomIntBetween(0, internalCluster().numDataNodes())); @@ -104,12 +109,17 @@ public class ActiveShardsObserverIT extends ESIntegTestCase { .put(INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), randomIntBetween(1, 5)) .put(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), numReplicas) .build(); - assertFalse(prepareCreate("test-idx1") + final String indexName = "test-idx"; + assertFalse(prepareCreate(indexName) .setSettings(settings) .setWaitForActiveShards(ActiveShardCount.ALL) .setTimeout("100ms") .get() .isShardsAcked()); + 
waitForIndexCreationToComplete(indexName); + if (client().admin().indices().prepareExists(indexName).get().isExists()) { + client().admin().indices().prepareDelete(indexName).get(); + } // enough data nodes, all shards are active settings = Settings.builder() @@ -117,7 +127,7 @@ public class ActiveShardsObserverIT extends ESIntegTestCase { .put(INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), randomIntBetween(1, 7)) .put(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), internalCluster().numDataNodes() - 1) .build(); - assertAcked(prepareCreate("test-idx2").setSettings(settings).setWaitForActiveShards(ActiveShardCount.ALL).get()); + assertAcked(prepareCreate(indexName).setSettings(settings).setWaitForActiveShards(ActiveShardCount.ALL).get()); } public void testCreateIndexStopsWaitingWhenIndexDeleted() throws Exception { @@ -145,4 +155,12 @@ public class ActiveShardsObserverIT extends ESIntegTestCase { assertAcked(responseListener.get()); } + // Its possible that the cluster state update task that includes the create index hasn't processed before we timeout, + // and subsequently the test cleanup process does not delete the index in question because it does not see it, and + // only after the test cleanup does the index creation manifest in the cluster state. 
To take care of this problem + // and its potential ramifications, we wait here for the index creation cluster state update task to finish + private void waitForIndexCreationToComplete(final String indexName) { + client().admin().cluster().prepareHealth(indexName).setWaitForEvents(Priority.URGENT).get(); + } + } From 19e7b1c737052cca62c684f3ed8e4936f08666be Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Thu, 21 Jul 2016 14:27:04 -0700 Subject: [PATCH 28/93] fix: no other processors should be executed after on_failure is called in a compound processor (#19545) --- .../org/elasticsearch/ingest/CompoundProcessor.java | 1 + .../ingest/CompoundProcessorTests.java | 13 +++++++++++++ .../rest-api-spec/test/ingest/50_on_failure.yaml | 5 +++++ 3 files changed, 19 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java b/core/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java index 4af2e8a811a..501e8c1b2f9 100644 --- a/core/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java @@ -109,6 +109,7 @@ public class CompoundProcessor implements Processor { throw compoundProcessorException; } else { executeOnFailure(ingestDocument, compoundProcessorException); + break; } } } diff --git a/core/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java index 6cb2c9dd53f..be6ec1059d6 100644 --- a/core/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java @@ -193,4 +193,17 @@ public class CompoundProcessorTests extends ESTestCase { assertThat(firstProcessor.getInvokedCounter(), equalTo(1)); assertThat(secondProcessor.getInvokedCounter(), equalTo(1)); } + + public void testBreakOnFailure() throws Exception { + TestProcessor firstProcessor = new TestProcessor("id1", "first", ingestDocument -> 
{throw new RuntimeException("error1");}); + TestProcessor secondProcessor = new TestProcessor("id2", "second", ingestDocument -> {throw new RuntimeException("error2");}); + TestProcessor onFailureProcessor = new TestProcessor("id2", "on_failure", ingestDocument -> {}); + CompoundProcessor pipeline = new CompoundProcessor(false, Arrays.asList(firstProcessor, secondProcessor), + Collections.singletonList(onFailureProcessor)); + pipeline.execute(ingestDocument); + assertThat(firstProcessor.getInvokedCounter(), equalTo(1)); + assertThat(secondProcessor.getInvokedCounter(), equalTo(0)); + assertThat(onFailureProcessor.getInvokedCounter(), equalTo(1)); + + } } diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/50_on_failure.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/50_on_failure.yaml index 464199f6d3b..a4395427e04 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/50_on_failure.yaml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/50_on_failure.yaml @@ -19,6 +19,11 @@ "target_field" : "date", "formats" : ["yyyy"] } + }, + { + "uppercase" : { + "field": "field1" + } } ], "on_failure" : [ From bd574d92aed0566f0809b78321cd24add59fba59 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Fri, 22 Jul 2016 14:35:47 +0200 Subject: [PATCH 29/93] Verify lower level transport exceptions don't bubble up on disconnects (#19518) #19096 introduced a generic TCPTransport base class so we can have multiple TCP based transport implementation. These implementations can vary in how they respond internally to situations where we concurrently send, receive and handle disconnects and can have different exceptions. However, disconnects are important events for the rest of the code base and should be distinguished from other errors (for example, it signals TransportMasterAction that it needs to retry and wait for the a (new) master to come back). 
Therefore, we should make sure that all the implementations do the proper translation from their internal exceptions into ConnectTransportException which is used externally. Similarly we should make sure that the transport implementation properly recognize errors that were caused by a disconnect as such and deal with them correctly. This was, for example, the source of a build failure at https://elasticsearch-ci.elastic.co/job/elastic+elasticsearch+master+multijob-intake/1080 , where a concurrency issue cause SocketException to bubble out of MockTcpTransport. This PR adds a tests which concurrently simulates connects, disconnects, sending and receiving and makes sure the above holds. It also fixes anything (not much!) that was found it. --- .../transport/NetworkExceptionHelper.java | 3 + .../elasticsearch/transport/TcpTransport.java | 37 +++-- .../transport/local/LocalTransport.java | 30 +++- .../local/LocalTransportChannel.java | 7 +- .../AbstractSimpleTransportTestCase.java | 155 ++++++++++++++++-- 5 files changed, 188 insertions(+), 44 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/transport/NetworkExceptionHelper.java b/core/src/main/java/org/elasticsearch/common/transport/NetworkExceptionHelper.java index 77a39a8c22b..0317026b6be 100644 --- a/core/src/main/java/org/elasticsearch/common/transport/NetworkExceptionHelper.java +++ b/core/src/main/java/org/elasticsearch/common/transport/NetworkExceptionHelper.java @@ -55,6 +55,9 @@ public class NetworkExceptionHelper { if (e.getMessage().contains("Connection timed out")) { return true; } + if (e.getMessage().equals("Socket is closed")) { + return true; + } } return false; } diff --git a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java index b0139902a42..0e601ecb5b5 100644 --- a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java 
@@ -918,19 +918,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i transportServiceAdapter.onRequestSent(node, requestId, action, request, finalOptions); } }; - try { - sendMessage(targetChannel, message, onRequestSent, false); - } catch (IOException ex) { - if (nodeConnected(node)) { - throw ex; - } else { - // we might got disconnected in between the nodeChannel(node, options) call and the sending - - // in that case throw a subclass of ConnectTransportException since some code retries based on this - // see TransportMasterNodeAction for instance - throw new NodeNotConnectedException(node, "Node not connected"); - } - } - addedReleaseListener = true; + addedReleaseListener = internalSendMessage(targetChannel, message, onRequestSent); } finally { IOUtils.close(stream); if (!addedReleaseListener) { @@ -939,6 +927,25 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i } } + /** + * sends a message via the given channel, using the given callbacks. 
+ * + * @return true if the message was successfully sent or false when an error occurred and the error handling logic was activated + * + */ + private boolean internalSendMessage(Channel targetChannel, BytesReference message, Runnable onRequestSent) throws IOException { + boolean success; + try { + sendMessage(targetChannel, message, onRequestSent, false); + success = true; + } catch (IOException ex) { + // passing exception handling to deal with this and raise disconnect events and decide the right logging level + onException(targetChannel, ex); + success = false; + } + return success; + } + /** * Sends back an error response to the caller via the given channel * @param nodeVersion the caller node version @@ -997,9 +1004,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i transportServiceAdapter.onResponseSent(requestId, action, response, finalOptions); } }; - sendMessage(channel, reference, onRequestSent, false); - addedReleaseListener = true; - + addedReleaseListener = internalSendMessage(channel, reference, onRequestSent); } finally { IOUtils.close(stream); if (!addedReleaseListener) { diff --git a/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java b/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java index eba5fd57734..c94e62ea422 100644 --- a/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.indices.breaker.CircuitBreakerService; import 
org.elasticsearch.threadpool.ThreadPool; @@ -230,12 +231,30 @@ public class LocalTransport extends AbstractLifecycleComponent implements Transp final byte[] data = BytesReference.toBytes(stream.bytes()); transportServiceAdapter.sent(data.length); transportServiceAdapter.onRequestSent(node, requestId, action, request, options); - targetTransport.workers().execute(() -> { - ThreadContext threadContext = targetTransport.threadPool.getThreadContext(); + targetTransport.receiveMessage(version, data, action, requestId, this); + } + } + + /** + * entry point for incoming messages + * + * @param version the version used to serialize the message + * @param data message data + * @param action the action associated with this message (only used for error handling when data is not parsable) + * @param requestId requestId if the message is request (only used for error handling when data is not parsable) + * @param sourceTransport the source transport to respond to. + */ + public void receiveMessage(Version version, byte[] data, String action, @Nullable Long requestId, LocalTransport sourceTransport) { + try { + workers().execute(() -> { + ThreadContext threadContext = threadPool.getThreadContext(); try (ThreadContext.StoredContext context = threadContext.stashContext()) { - targetTransport.messageReceived(data, action, LocalTransport.this, version, requestId); + processReceivedMessage(data, action, sourceTransport, version, requestId); } }); + } catch (EsRejectedExecutionException e) { + assert lifecycle.started() == false; + logger.trace("received request but shutting down. ignoring. 
action [{}], request id [{}]", action, requestId); } } @@ -248,8 +267,9 @@ public class LocalTransport extends AbstractLifecycleComponent implements Transp return circuitBreakerService.getBreaker(CircuitBreaker.IN_FLIGHT_REQUESTS); } - protected void messageReceived(byte[] data, String action, LocalTransport sourceTransport, Version version, - @Nullable final Long sendRequestId) { + /** processes received messages, assuming thread passing and thread context have all been dealt with */ + protected void processReceivedMessage(byte[] data, String action, LocalTransport sourceTransport, Version version, + @Nullable final Long sendRequestId) { Transports.assertTransportThread(); try { transportServiceAdapter.received(data.length); diff --git a/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java b/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java index fc748b96aea..0c1e8747a12 100644 --- a/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java +++ b/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java @@ -107,12 +107,7 @@ public class LocalTransportChannel implements TransportChannel { private void sendResponseData(byte[] data) { close(); - targetTransport.workers().execute(() -> { - ThreadContext threadContext = targetTransport.threadPool.getThreadContext(); - try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { - targetTransport.messageReceived(data, action, sourceTransport, version, null); - } - }); + targetTransport.receiveMessage(version, data, action, null, sourceTransport); } private void close() { diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 42275c75e5a..87087e772ab 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java 
+++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -21,6 +21,8 @@ package org.elasticsearch.transport; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -28,6 +30,8 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; @@ -41,16 +45,19 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutionException; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -75,24 +82,10 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { public 
void setUp() throws Exception { super.setUp(); threadPool = new TestThreadPool(getClass().getName()); - serviceA = build( - Settings.builder() - .put("name", "TS_A") - .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") - .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") - .build(), - version0); - serviceA.acceptIncomingRequests(); + serviceA = buildService("TS_A", version0); nodeA = new DiscoveryNode("TS_A", serviceA.boundAddress().publishAddress(), emptyMap(), emptySet(), version0); // serviceA.setLocalNode(nodeA); - serviceB = build( - Settings.builder() - .put("name", "TS_B") - .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") - .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") - .build(), - version1); - serviceB.acceptIncomingRequests(); + serviceB = buildService("TS_B", version1); nodeB = new DiscoveryNode("TS_B", serviceB.boundAddress().publishAddress(), emptyMap(), emptySet(), version1); //serviceB.setLocalNode(nodeB); // wait till all nodes are properly connected and the event has been sent, so tests in this class @@ -131,6 +124,18 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { serviceB.removeConnectionListener(waitForConnection); } + private MockTransportService buildService(final String name, final Version version) { + MockTransportService service = build( + Settings.builder() + .put("name", name) + .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") + .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") + .build(), + version); + service.acceptIncomingRequests(); + return service; + } + @Override @After public void tearDown() throws Exception { @@ -483,6 +488,122 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { assertThat(latch.await(5, TimeUnit.SECONDS), equalTo(true)); } + public void testConcurrentSendRespondAndDisconnect() throws BrokenBarrierException, InterruptedException { + Set sendingErrors = 
ConcurrentCollections.newConcurrentSet(); + Set responseErrors = ConcurrentCollections.newConcurrentSet(); + serviceA.registerRequestHandler("test", TestRequest::new, + randomBoolean() ? ThreadPool.Names.SAME : ThreadPool.Names.GENERIC, (request, channel) -> { + try { + channel.sendResponse(new TestResponse()); + } catch (Exception e) { + logger.info("caught exception while responding", e); + responseErrors.add(e); + } + }); + final TransportRequestHandler ignoringRequestHandler = (request, channel) -> { + try { + channel.sendResponse(new TestResponse()); + } catch (Exception e) { + // we don't really care what's going on B, we're testing through A + logger.trace("caught exception while responding from node B", e); + } + }; + serviceB.registerRequestHandler("test", TestRequest::new, ThreadPool.Names.SAME, ignoringRequestHandler); + + int halfSenders = scaledRandomIntBetween(3, 10); + final CyclicBarrier go = new CyclicBarrier(halfSenders * 2 + 1); + final CountDownLatch done = new CountDownLatch(halfSenders * 2); + for (int i = 0; i < halfSenders; i++) { + // B senders just generate activity so serviceA can respond, we don't test what's going on there + final int sender = i; + threadPool.executor(ThreadPool.Names.GENERIC).execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + logger.trace("caught exception while sending from B", e); + } + + @Override + protected void doRun() throws Exception { + go.await(); + for (int iter = 0; iter < 10; iter++) { + PlainActionFuture listener = new PlainActionFuture<>(); + final String info = sender + "_B_" + iter; + serviceB.sendRequest(nodeA, "test", new TestRequest(info), + new ActionListenerResponseHandler<>(listener, TestResponse::new)); + try { + listener.actionGet(); + + } catch (Exception e) { + logger.trace("caught exception while sending to node {}", e, nodeA); + } + } + } + + @Override + public void onAfter() { + done.countDown(); + } + }); + } + + for (int i = 0; i < halfSenders; i++) { 
+ final int sender = i; + threadPool.executor(ThreadPool.Names.GENERIC).execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + logger.error("unexpected error", e); + sendingErrors.add(e); + } + + @Override + protected void doRun() throws Exception { + go.await(); + for (int iter = 0; iter < 10; iter++) { + PlainActionFuture listener = new PlainActionFuture<>(); + final String info = sender + "_" + iter; + serviceA.sendRequest(nodeB, "test", new TestRequest(info), + new ActionListenerResponseHandler<>(listener, TestResponse::new)); + try { + listener.actionGet(); + } catch (ConnectTransportException e) { + // ok! + } catch (Exception e) { + logger.error("caught exception while sending to node {}", e, nodeB); + sendingErrors.add(e); + } + } + } + + @Override + public void onAfter() { + done.countDown(); + } + }); + } + go.await(); + for (int i = 0; i <= 10; i++) { + if (i % 3 == 0) { + // simulate restart of nodeB + serviceB.close(); + MockTransportService newService = buildService("TS_B", version1); + newService.registerRequestHandler("test", TestRequest::new, ThreadPool.Names.SAME, ignoringRequestHandler); + serviceB = newService; + nodeB = new DiscoveryNode("TS_B", serviceB.boundAddress().publishAddress(), emptyMap(), emptySet(), version1); + serviceB.connectToNode(nodeA); + serviceA.connectToNode(nodeB); + } else if (serviceA.nodeConnected(nodeB)) { + serviceA.disconnectFromNode(nodeB); + } else { + serviceA.connectToNode(nodeB); + } + } + + done.await(); + + assertThat("found non connection errors while sending", sendingErrors, empty()); + assertThat("found non connection errors while responding", responseErrors, empty()); + } + public void testNotifyOnShutdown() throws Exception { final CountDownLatch latch2 = new CountDownLatch(1); From 37e075a506c0eaeebc7e3146c07d155091af36c6 Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 21 Jul 2016 15:17:42 +0200 Subject: [PATCH 30/93] Make SyncResponseListener safer Throw explicit 
IllegalStateException in unexpected situations, like where both response and exception are set, or when both are unset. Add unit test for SyncResponseListener. --- .../org/elasticsearch/client/RestClient.java | 69 +++++-- .../client/SyncResponseListenerTests.java | 172 ++++++++++++++++++ 2 files changed, 222 insertions(+), 19 deletions(-) create mode 100644 client/rest/src/test/java/org/elasticsearch/client/SyncResponseListenerTests.java diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index 32d608e385f..e7f4c58edb8 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -61,6 +61,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; /** * Client that connects to an elasticsearch cluster through http. @@ -199,7 +200,7 @@ public final class RestClient implements Closeable { public Response performRequest(String method, String endpoint, Map params, HttpEntity entity, HttpAsyncResponseConsumer responseConsumer, Header... 
headers) throws IOException { - SyncResponseListener listener = new SyncResponseListener(); + SyncResponseListener listener = new SyncResponseListener(maxRetryTimeoutMillis); performRequest(method, endpoint, params, entity, responseConsumer, listener, headers); return listener.get(); } @@ -525,42 +526,72 @@ public final class RestClient implements Closeable { } } - private static class SyncResponseListener implements ResponseListener { - final CountDownLatch latch = new CountDownLatch(1); - volatile Response response; - volatile Exception exception; + static class SyncResponseListener implements ResponseListener { + private final CountDownLatch latch = new CountDownLatch(1); + private final AtomicReference response = new AtomicReference<>(); + private final AtomicReference exception = new AtomicReference<>(); + + private final long timeout; + + SyncResponseListener(long timeout) { + assert timeout > 0; + this.timeout = timeout; + } @Override public void onSuccess(Response response) { - this.response = response; + Objects.requireNonNull(response, "response must not be null"); + boolean wasResponseNull = this.response.compareAndSet(null, response); + if (wasResponseNull == false) { + throw new IllegalStateException("response is already set"); + } + latch.countDown(); } @Override public void onFailure(Exception exception) { - this.exception = exception; + Objects.requireNonNull(exception, "exception must not be null"); + boolean wasExceptionNull = this.exception.compareAndSet(null, exception); + if (wasExceptionNull == false) { + throw new IllegalStateException("exception is already set"); + } latch.countDown(); } Response get() throws IOException { try { - latch.await(); + //providing timeout is just a safety measure to prevent everlasting waits + //the different client timeouts should already do their jobs + if (latch.await(timeout, TimeUnit.MILLISECONDS) == false) { + throw new IOException("listener timeout after waiting for [" + timeout + "] ms"); + } } catch 
(InterruptedException e) { throw new RuntimeException("thread waiting for the response was interrupted", e); } - if (response != null) { - assert exception == null; - return response; + + Exception exception = this.exception.get(); + Response response = this.response.get(); + if (exception != null) { + if (response != null) { + IllegalStateException e = new IllegalStateException("response and exception are unexpectedly set at the same time"); + e.addSuppressed(exception); + throw e; + } + //try and leave the exception untouched as much as possible but we don't want to just add throws Exception clause everywhere + if (exception instanceof IOException) { + throw (IOException) exception; + } + if (exception instanceof RuntimeException){ + throw (RuntimeException) exception; + } + throw new RuntimeException("error while performing request", exception); } - assert exception != null; - //try and leave the exception untouched as much as possible but we don't want to just add throws Exception clause everywhere - if (exception instanceof IOException) { - throw (IOException) exception; + + if (response == null) { + throw new IllegalStateException("response not set and no exception caught either"); } - if (exception instanceof RuntimeException){ - throw (RuntimeException) exception; - } - throw new IOException("error while performing request", exception); + return response; } } diff --git a/client/rest/src/test/java/org/elasticsearch/client/SyncResponseListenerTests.java b/client/rest/src/test/java/org/elasticsearch/client/SyncResponseListenerTests.java new file mode 100644 index 00000000000..154efb4cac3 --- /dev/null +++ b/client/rest/src/test/java/org/elasticsearch/client/SyncResponseListenerTests.java @@ -0,0 +1,172 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.apache.http.HttpHost; +import org.apache.http.HttpResponse; +import org.apache.http.ProtocolVersion; +import org.apache.http.RequestLine; +import org.apache.http.StatusLine; +import org.apache.http.message.BasicHttpResponse; +import org.apache.http.message.BasicRequestLine; +import org.apache.http.message.BasicStatusLine; + +import java.io.IOException; +import java.net.URISyntaxException; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.fail; + +public class SyncResponseListenerTests extends RestClientTestCase { + + public void testOnSuccessNullResponse() { + RestClient.SyncResponseListener syncResponseListener = new RestClient.SyncResponseListener(10000); + try { + syncResponseListener.onSuccess(null); + fail("onSuccess should have failed"); + } catch(NullPointerException e) { + assertEquals("response must not be null", e.getMessage()); + } + } + + public void testOnFailureNullException() { + RestClient.SyncResponseListener syncResponseListener = new RestClient.SyncResponseListener(10000); + try { + syncResponseListener.onFailure(null); + fail("onFailure should have failed"); + } catch(NullPointerException e) { + assertEquals("exception must not be null", e.getMessage()); + } + } + + public void 
testOnSuccess() throws Exception { + RestClient.SyncResponseListener syncResponseListener = new RestClient.SyncResponseListener(10000); + Response mockResponse = mockResponse(); + syncResponseListener.onSuccess(mockResponse); + Response response = syncResponseListener.get(); + assertSame(response, mockResponse); + + try { + syncResponseListener.onSuccess(mockResponse); + fail("get should have failed"); + } catch(IllegalStateException e) { + assertEquals(e.getMessage(), "response is already set"); + } + response = syncResponseListener.get(); + assertSame(response, mockResponse); + + RuntimeException runtimeException = new RuntimeException("test"); + syncResponseListener.onFailure(runtimeException); + try { + syncResponseListener.get(); + fail("get should have failed"); + } catch(IllegalStateException e) { + assertEquals("response and exception are unexpectedly set at the same time", e.getMessage()); + assertNotNull(e.getSuppressed()); + assertEquals(1, e.getSuppressed().length); + assertSame(runtimeException, e.getSuppressed()[0]); + } + } + + public void testOnFailure() throws Exception { + RestClient.SyncResponseListener syncResponseListener = new RestClient.SyncResponseListener(10000); + RuntimeException firstException = new RuntimeException("first-test"); + syncResponseListener.onFailure(firstException); + try { + syncResponseListener.get(); + fail("get should have failed"); + } catch(RuntimeException e) { + assertSame(firstException, e); + } + + RuntimeException secondException = new RuntimeException("second-test"); + try { + syncResponseListener.onFailure(secondException); + } catch(IllegalStateException e) { + assertEquals(e.getMessage(), "exception is already set"); + } + try { + syncResponseListener.get(); + fail("get should have failed"); + } catch(RuntimeException e) { + assertSame(firstException, e); + } + + Response response = mockResponse(); + syncResponseListener.onSuccess(response); + try { + syncResponseListener.get(); + fail("get should have 
failed"); + } catch(IllegalStateException e) { + assertEquals("response and exception are unexpectedly set at the same time", e.getMessage()); + assertNotNull(e.getSuppressed()); + assertEquals(1, e.getSuppressed().length); + assertSame(firstException, e.getSuppressed()[0]); + } + } + + public void testRuntimeExceptionIsNotWrapped() throws Exception { + RestClient.SyncResponseListener syncResponseListener = new RestClient.SyncResponseListener(10000); + RuntimeException runtimeException = new RuntimeException(); + syncResponseListener.onFailure(runtimeException); + try { + syncResponseListener.get(); + fail("get should have failed"); + } catch(RuntimeException e) { + assertSame(runtimeException, e); + } + } + + public void testIOExceptionIsNotWrapped() throws Exception { + RestClient.SyncResponseListener syncResponseListener = new RestClient.SyncResponseListener(10000); + IOException ioException = new IOException(); + syncResponseListener.onFailure(ioException); + try { + syncResponseListener.get(); + fail("get should have failed"); + } catch(IOException e) { + assertSame(ioException, e); + } + } + + public void testExceptionIsWrapped() throws Exception { + RestClient.SyncResponseListener syncResponseListener = new RestClient.SyncResponseListener(10000); + //we just need any checked exception + URISyntaxException exception = new URISyntaxException("test", "test"); + syncResponseListener.onFailure(exception); + try { + syncResponseListener.get(); + fail("get should have failed"); + } catch(RuntimeException e) { + assertEquals("error while performing request", e.getMessage()); + assertSame(exception, e.getCause()); + } + } + + private static Response mockResponse() { + ProtocolVersion protocolVersion = new ProtocolVersion("HTTP", 1, 1); + RequestLine requestLine = new BasicRequestLine("GET", "/", protocolVersion); + StatusLine statusLine = new BasicStatusLine(protocolVersion, 200, "OK"); + HttpResponse httpResponse = new BasicHttpResponse(statusLine); + return new 
Response(requestLine, new HttpHost("localhost", 9200), httpResponse); + } +} From a579866b426523831cfdf673b0002ac2a309d9e0 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 22 Jul 2016 15:49:39 +0200 Subject: [PATCH 31/93] rename mayRetry to isRetryStatus --- .../src/main/java/org/elasticsearch/client/RestClient.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index e7f4c58edb8..4faf6321a8d 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -301,7 +301,7 @@ public final class RestClient implements Closeable { listener.onSuccess(response); } else { ResponseException responseException = new ResponseException(response); - if (mayRetry(statusCode)) { + if (isRetryStatus(statusCode)) { //mark host dead and retry against next one onFailure(host); retryIfPossible(responseException, hosts, request); @@ -438,7 +438,7 @@ public final class RestClient implements Closeable { return statusCode < 300 || (HttpHead.METHOD_NAME.equals(method) && statusCode == 404); } - private static boolean mayRetry(int statusCode) { + private static boolean isRetryStatus(int statusCode) { switch(statusCode) { case 502: case 503: From 6adfeb4ddeae385222f984347d64d2baabb036e7 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 22 Jul 2016 16:29:14 +0200 Subject: [PATCH 32/93] [TEST] Add Netty3RestIT to actually run REST tests on netty3 --- .../http/netty3/Netty3RestIT.java | 40 +++++++++++++++++++ .../rest-api-spec/test/10_basic.yaml | 2 +- 2 files changed, 41 insertions(+), 1 deletion(-) create mode 100644 modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3RestIT.java diff --git a/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3RestIT.java 
b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3RestIT.java new file mode 100644 index 00000000000..a276922354a --- /dev/null +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3RestIT.java @@ -0,0 +1,40 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.http.netty3; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.RestTestCandidate; +import org.elasticsearch.test.rest.parser.RestTestParseException; + +import java.io.IOException; + +public class Netty3RestIT extends ESRestTestCase { + + public Netty3RestIT(@Name("yaml") RestTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws IOException, RestTestParseException { + return ESRestTestCase.createParameters(0, 1); + } +} diff --git a/modules/transport-netty3/src/test/resources/rest-api-spec/test/10_basic.yaml b/modules/transport-netty3/src/test/resources/rest-api-spec/test/10_basic.yaml index eaf51de4484..e25074fb90c 100644 --- a/modules/transport-netty3/src/test/resources/rest-api-spec/test/10_basic.yaml +++ b/modules/transport-netty3/src/test/resources/rest-api-spec/test/10_basic.yaml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.modules.0.name: transport-netty3 } \ No newline at end of file + - match: { nodes.$master.modules.0.name: transport-netty3 } From a6a685b0f6733c3b8cd6b00e9995335361dfa0d9 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 22 Jul 2016 16:36:14 +0200 Subject: [PATCH 33/93] make Response class final --- .../rest/src/main/java/org/elasticsearch/client/Response.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/rest/src/main/java/org/elasticsearch/client/Response.java b/client/rest/src/main/java/org/elasticsearch/client/Response.java index 91ca0a6c935..7edaa49c2ea 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/Response.java +++ b/client/rest/src/main/java/org/elasticsearch/client/Response.java @@ -32,7 +32,7 @@ import java.util.Objects; * Holds an elasticsearch response. 
It wraps the {@link HttpResponse} returned and associates it with * its corresponding {@link RequestLine} and {@link HttpHost}. */ -public class Response { +public final class Response { private final RequestLine requestLine; private final HttpHost host; From d13a3d3761cc391fbca5e9c1215405b01479418a Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 22 Jul 2016 16:46:17 +0200 Subject: [PATCH 34/93] Reindex from remote: add fallback in case content-type header is not set We better read the header, but who knows what can happen, maybe headers are filtered out for some reasons and we don't want to run into an NPE, then we fallback to auto-detection. --- .../reindex/remote/RemoteScrollableHitSource.java | 14 +++++++++++--- .../remote/RemoteScrollableHitSourceTests.java | 3 ++- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java index c96b3efca8a..eee3e2c59ae 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.reindex.ScrollableHitSource; @@ -139,9 +140,16 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { public void onSuccess(org.elasticsearch.client.Response response) { T parsedResponse; try { - InputStream content = 
response.getEntity().getContent(); - XContentType xContentType = XContentType.fromMediaTypeOrFormat( - response.getEntity().getContentType().getValue()); + HttpEntity responseEntity = response.getEntity(); + InputStream content = responseEntity.getContent(); + XContentType xContentType = null; + if (responseEntity.getContentType() != null) { + xContentType = XContentType.fromMediaTypeOrFormat(responseEntity.getContentType().getValue()); + } + if (xContentType == null) { + //auto-detect as a fallback + xContentType = XContentFactory.xContentType(content); + } try(XContentParser xContentParser = xContentType.xContent().createParser(content)) { parsedResponse = parser.apply(xContentParser, () -> ParseFieldMatcher.STRICT); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java index c9dd25ec130..6af4dab9405 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -361,7 +361,8 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { } else { StatusLine statusLine = new BasicStatusLine(protocolVersion, 200, ""); HttpResponse httpResponse = new BasicHttpResponse(statusLine); - httpResponse.setEntity(new InputStreamEntity(resource.openStream(), ContentType.APPLICATION_JSON)); + httpResponse.setEntity(new InputStreamEntity(resource.openStream(), + randomBoolean() ? 
ContentType.APPLICATION_JSON : null)); futureCallback.completed(httpResponse); } return null; From 835d8cecdc4b533a95e8df8d897d0787190190f7 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 22 Jul 2016 17:31:11 +0200 Subject: [PATCH 35/93] [TEST] add unit tests for internal TrackingFailureListener Any provided listener will always be wrapped into FailureTrackingListener to handle retries --- .../org/elasticsearch/client/RestClient.java | 2 +- .../FailureTrackingResponseListenerTests.java | 107 ++++++++++++++++++ 2 files changed, 108 insertions(+), 1 deletion(-) create mode 100644 client/rest/src/test/java/org/elasticsearch/client/FailureTrackingResponseListenerTests.java diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index 4faf6321a8d..eb51d6982f0 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -504,7 +504,7 @@ public final class RestClient implements Closeable { } } - private static class FailureTrackingResponseListener { + static class FailureTrackingResponseListener { private final ResponseListener responseListener; private volatile Exception exception; diff --git a/client/rest/src/test/java/org/elasticsearch/client/FailureTrackingResponseListenerTests.java b/client/rest/src/test/java/org/elasticsearch/client/FailureTrackingResponseListenerTests.java new file mode 100644 index 00000000000..f6ec388d09d --- /dev/null +++ b/client/rest/src/test/java/org/elasticsearch/client/FailureTrackingResponseListenerTests.java @@ -0,0 +1,107 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.apache.http.HttpHost; +import org.apache.http.HttpResponse; +import org.apache.http.ProtocolVersion; +import org.apache.http.RequestLine; +import org.apache.http.StatusLine; +import org.apache.http.message.BasicHttpResponse; +import org.apache.http.message.BasicRequestLine; +import org.apache.http.message.BasicStatusLine; + +import java.util.concurrent.atomic.AtomicReference; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; + +public class FailureTrackingResponseListenerTests extends RestClientTestCase { + + public void testOnSuccess() { + MockResponseListener responseListener = new MockResponseListener(); + RestClient.FailureTrackingResponseListener listener = new RestClient.FailureTrackingResponseListener(responseListener); + + } + + public void testOnFailure() { + MockResponseListener responseListener = new MockResponseListener(); + RestClient.FailureTrackingResponseListener listener = new RestClient.FailureTrackingResponseListener(responseListener); + int numIters = randomIntBetween(1, 10); + Exception[] expectedExceptions = new Exception[numIters]; + for (int i = 0; i < numIters; i++) { + RuntimeException runtimeException = new RuntimeException("test" + i); + expectedExceptions[i] = 
runtimeException; + listener.trackFailure(runtimeException); + assertNull(responseListener.response.get()); + assertNull(responseListener.exception.get()); + } + + if (randomBoolean()) { + Response response = mockResponse(); + listener.onSuccess(response); + assertSame(response, responseListener.response.get()); + assertNull(responseListener.exception.get()); + } else { + RuntimeException runtimeException = new RuntimeException("definitive"); + listener.onDefinitiveFailure(runtimeException); + assertNull(responseListener.response.get()); + Throwable exception = responseListener.exception.get(); + assertSame(runtimeException, exception); + + int i = numIters - 1; + do { + assertNotNull(exception.getSuppressed()); + assertEquals(1, exception.getSuppressed().length); + assertSame(expectedExceptions[i--], exception.getSuppressed()[0]); + exception = exception.getSuppressed()[0]; + } while(i >= 0); + } + } + + private static class MockResponseListener implements ResponseListener { + private final AtomicReference response = new AtomicReference<>(); + private final AtomicReference exception = new AtomicReference<>(); + + @Override + public void onSuccess(Response response) { + if (this.response.compareAndSet(null, response) == false) { + throw new IllegalStateException("onSuccess was called multiple times"); + } + } + + @Override + public void onFailure(Exception exception) { + if (this.exception.compareAndSet(null, exception) == false) { + throw new IllegalStateException("onFailure was called multiple times"); + } + } + } + + private static Response mockResponse() { + ProtocolVersion protocolVersion = new ProtocolVersion("HTTP", 1, 1); + RequestLine requestLine = new BasicRequestLine("GET", "/", protocolVersion); + StatusLine statusLine = new BasicStatusLine(protocolVersion, 200, "OK"); + HttpResponse httpResponse = new BasicHttpResponse(statusLine); + return new Response(requestLine, new HttpHost("localhost", 9200), httpResponse); + } +} From 
4e8ee1f0abec96d929a8e77be3b8672235df4a04 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 22 Jul 2016 17:44:50 +0200 Subject: [PATCH 36/93] add some javadocs to clarify internal listeners behaviour --- .../org/elasticsearch/client/RestClient.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index eb51d6982f0..f34ec4e2b85 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -504,6 +504,11 @@ public final class RestClient implements Closeable { } } + /** + * Listener used in any async call to wrap the provided user listener (or SyncResponseListener in sync calls). + * Allows to track potential failures coming from the different retry attempts and returning to the original listener + * only when we got a response (successful or not to be retried) or there are no hosts to retry against. 
+ */ static class FailureTrackingResponseListener { private final ResponseListener responseListener; private volatile Exception exception; @@ -512,20 +517,32 @@ public final class RestClient implements Closeable { this.responseListener = responseListener; } + /** + * Notifies the caller of a response through the wrapped listener + */ void onSuccess(Response response) { responseListener.onSuccess(response); } + /** + * Tracks one last definitive failure and returns to the caller by notifying the wrapped listener + */ void onDefinitiveFailure(Exception exception) { trackFailure(exception); responseListener.onFailure(this.exception); } + /** + * Tracks an exception, which caused a retry hence we should not return yet to the caller + */ void trackFailure(Exception exception) { this.exception = addSuppressedException(this.exception, exception); } } + /** + * Listener used in any sync performRequest calls, it waits for a response or an exception back up to a timeout + */ static class SyncResponseListener implements ResponseListener { private final CountDownLatch latch = new CountDownLatch(1); private final AtomicReference response = new AtomicReference<>(); @@ -559,6 +576,9 @@ public final class RestClient implements Closeable { latch.countDown(); } + /** + * Waits (up to a timeout) for some result of the request: either a response, or an exception. + */ Response get() throws IOException { try { //providing timeout is just a safety measure to prevent everlasting waits From 2b9cfff90f6a11095c36fae0dce35bbe9d0c6f96 Mon Sep 17 00:00:00 2001 From: Ali Beyad Date: Fri, 22 Jul 2016 12:25:28 -0400 Subject: [PATCH 37/93] Fixes CORS handling so that it uses the defaults Fixes CORS handling so that it uses the defaults for http.cors.allow-methods and http.cors.allow-headers if none are specified in the config. 
Closes #19520 --- .../org/elasticsearch/common/Strings.java | 26 +++++++++++++++-- .../http/HttpTransportSettings.java | 4 +-- .../elasticsearch/common/StringsTests.java | 29 +++++++++++++++++++ .../netty3/Netty3HttpServerTransport.java | 12 ++++---- .../netty3/cors/Netty3CorsConfigBuilder.java | 8 ++--- .../Netty3HttpServerTransportTests.java | 16 ++++++++++ 6 files changed, 80 insertions(+), 15 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/Strings.java b/core/src/main/java/org/elasticsearch/common/Strings.java index 63afe9a0323..955b836ca1c 100644 --- a/core/src/main/java/org/elasticsearch/common/Strings.java +++ b/core/src/main/java/org/elasticsearch/common/Strings.java @@ -33,6 +33,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; @@ -509,7 +510,19 @@ public class Strings { else return s.split(","); } + /** + * A convenience method for splitting a delimited string into + * a set and trimming leading and trailing whitespace from all + * split strings. + * + * @param s the string to split + * @param c the delimiter to split on + * @return the set of split strings + */ public static Set splitStringToSet(final String s, final char c) { + if (s == null || s.isEmpty()) { + return Collections.emptySet(); + } final char[] chars = s.toCharArray(); int count = 1; for (final char x : chars) { @@ -521,16 +534,25 @@ public class Strings { final int len = chars.length; int start = 0; // starting index in chars of the current substring. int pos = 0; // current index in chars. 
+ int end = 0; // the position of the end of the current token for (; pos < len; pos++) { if (chars[pos] == c) { - int size = pos - start; + int size = end - start; if (size > 0) { // only add non empty strings result.add(new String(chars, start, size)); } start = pos + 1; + end = start; + } else if (Character.isWhitespace(chars[pos])) { + if (start == pos) { + // skip over preceding whitespace + start++; + } + } else { + end = pos + 1; } } - int size = pos - start; + int size = end - start; if (size > 0) { result.add(new String(chars, start, size)); } diff --git a/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java b/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java index 72f8f380df8..60bc3449d0b 100644 --- a/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java +++ b/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java @@ -40,9 +40,9 @@ public final class HttpTransportSettings { public static final Setting SETTING_CORS_MAX_AGE = Setting.intSetting("http.cors.max-age", 1728000, Property.NodeScope); public static final Setting SETTING_CORS_ALLOW_METHODS = - new Setting<>("http.cors.allow-methods", "OPTIONS, HEAD, GET, POST, PUT, DELETE", (value) -> value, Property.NodeScope); + new Setting<>("http.cors.allow-methods", "OPTIONS,HEAD,GET,POST,PUT,DELETE", (value) -> value, Property.NodeScope); public static final Setting SETTING_CORS_ALLOW_HEADERS = - new Setting<>("http.cors.allow-headers", "X-Requested-With, Content-Type, Content-Length", (value) -> value, Property.NodeScope); + new Setting<>("http.cors.allow-headers", "X-Requested-With,Content-Type,Content-Length", (value) -> value, Property.NodeScope); public static final Setting SETTING_CORS_ALLOW_CREDENTIALS = Setting.boolSetting("http.cors.allow-credentials", false, Property.NodeScope); public static final Setting SETTING_PIPELINING = diff --git a/core/src/test/java/org/elasticsearch/common/StringsTests.java 
b/core/src/test/java/org/elasticsearch/common/StringsTests.java index 406b4160519..1b987d47796 100644 --- a/core/src/test/java/org/elasticsearch/common/StringsTests.java +++ b/core/src/test/java/org/elasticsearch/common/StringsTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.common; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.ESTestCase; @@ -73,4 +74,32 @@ public class StringsTests extends ESTestCase { assertThat(toString, containsString("\"ok\":\"here\"")); assertThat(toString, containsString("\"catastrophe\":\"\"")); } + + public void testSplitStringToSet() { + assertEquals(Strings.splitStringByCommaToSet(null), Sets.newHashSet()); + assertEquals(Strings.splitStringByCommaToSet(""), Sets.newHashSet()); + assertEquals(Strings.splitStringByCommaToSet("a,b,c"), Sets.newHashSet("a","b","c")); + assertEquals(Strings.splitStringByCommaToSet("a, b, c"), Sets.newHashSet("a","b","c")); + assertEquals(Strings.splitStringByCommaToSet(" a , b, c "), Sets.newHashSet("a","b","c")); + assertEquals(Strings.splitStringByCommaToSet("aa, bb, cc"), Sets.newHashSet("aa","bb","cc")); + assertEquals(Strings.splitStringByCommaToSet(" a "), Sets.newHashSet("a")); + assertEquals(Strings.splitStringByCommaToSet(" a "), Sets.newHashSet("a")); + assertEquals(Strings.splitStringByCommaToSet(" aa "), Sets.newHashSet("aa")); + assertEquals(Strings.splitStringByCommaToSet(" "), Sets.newHashSet()); + + assertEquals(Strings.splitStringToSet(null, ' '), Sets.newHashSet()); + assertEquals(Strings.splitStringToSet("", ' '), Sets.newHashSet()); + assertEquals(Strings.splitStringToSet("a b c", ' '), Sets.newHashSet("a","b","c")); + assertEquals(Strings.splitStringToSet("a, b, c", ' '), Sets.newHashSet("a,","b,","c")); + assertEquals(Strings.splitStringToSet(" a b c ", ' '), Sets.newHashSet("a","b","c")); + assertEquals(Strings.splitStringToSet(" a b c ", 
' '), Sets.newHashSet("a","b","c")); + assertEquals(Strings.splitStringToSet("aa bb cc", ' '), Sets.newHashSet("aa","bb","cc")); + assertEquals(Strings.splitStringToSet(" a ", ' '), Sets.newHashSet("a")); + assertEquals(Strings.splitStringToSet(" a ", ' '), Sets.newHashSet("a")); + assertEquals(Strings.splitStringToSet(" a ", ' '), Sets.newHashSet("a")); + assertEquals(Strings.splitStringToSet("a ", ' '), Sets.newHashSet("a")); + assertEquals(Strings.splitStringToSet(" aa ", ' '), Sets.newHashSet("aa")); + assertEquals(Strings.splitStringToSet("aa ", ' '), Sets.newHashSet("aa")); + assertEquals(Strings.splitStringToSet(" ", ' '), Sets.newHashSet()); + } } diff --git a/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpServerTransport.java b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpServerTransport.java index c480155dceb..edbcc74e646 100644 --- a/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpServerTransport.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpServerTransport.java @@ -81,9 +81,11 @@ import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Set; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicReference; import java.util.regex.Pattern; +import java.util.stream.Collectors; import static org.elasticsearch.common.settings.Setting.boolSetting; import static org.elasticsearch.common.settings.Setting.byteSizeSetting; @@ -390,14 +392,10 @@ public class Netty3HttpServerTransport extends AbstractLifecycleComponent implem if (SETTING_CORS_ALLOW_CREDENTIALS.get(settings)) { builder.allowCredentials(); } - String[] strMethods = settings.getAsArray(SETTING_CORS_ALLOW_METHODS.getKey()); - HttpMethod[] methods = Arrays.asList(strMethods) - .stream() - .map(HttpMethod::valueOf) - .toArray(size -> new HttpMethod[size]); - return 
builder.allowedRequestMethods(methods) + Set strMethods = Strings.splitStringByCommaToSet(SETTING_CORS_ALLOW_METHODS.get(settings)); + return builder.allowedRequestMethods(strMethods.stream().map(HttpMethod::valueOf).collect(Collectors.toSet())) .maxAge(SETTING_CORS_MAX_AGE.get(settings)) - .allowedRequestHeaders(settings.getAsArray(SETTING_CORS_ALLOW_HEADERS.getKey())) + .allowedRequestHeaders(Strings.splitStringByCommaToSet(SETTING_CORS_ALLOW_HEADERS.get(settings))) .shortCircuit() .build(); } diff --git a/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/cors/Netty3CorsConfigBuilder.java b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/cors/Netty3CorsConfigBuilder.java index 947ec86b161..e7b94898b14 100644 --- a/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/cors/Netty3CorsConfigBuilder.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/cors/Netty3CorsConfigBuilder.java @@ -193,8 +193,8 @@ public final class Netty3CorsConfigBuilder { * @param methods the {@link HttpMethod}s that should be allowed. * @return {@link Netty3CorsConfigBuilder} to support method chaining. */ - public Netty3CorsConfigBuilder allowedRequestMethods(final HttpMethod... methods) { - requestMethods.addAll(Arrays.asList(methods)); + public Netty3CorsConfigBuilder allowedRequestMethods(final Set methods) { + requestMethods.addAll(methods); return this; } @@ -214,8 +214,8 @@ public final class Netty3CorsConfigBuilder { * @param headers the headers to be added to the preflight 'Access-Control-Allow-Headers' response header. * @return {@link Netty3CorsConfigBuilder} to support method chaining. */ - public Netty3CorsConfigBuilder allowedRequestHeaders(final String... 
headers) { - requestHeaders.addAll(Arrays.asList(headers)); + public Netty3CorsConfigBuilder allowedRequestHeaders(final Set headers) { + requestHeaders.addAll(headers); return this; } diff --git a/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpServerTransportTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpServerTransportTests.java index da7e320a557..901d517bf95 100644 --- a/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpServerTransportTests.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpServerTransportTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.http.netty3.cors.Netty3CorsConfig; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; @@ -86,4 +87,19 @@ public class Netty3HttpServerTransportTests extends ESTestCase { assertThat(corsConfig.allowedRequestMethods().stream().map(HttpMethod::getName).collect(Collectors.toSet()), equalTo(methods)); transport.close(); } + + public void testCorsConfigDefaults() { + final Set headers = Sets.newHashSet("X-Requested-With", "Content-Type", "Content-Length"); + final Set methods = Sets.newHashSet("OPTIONS", "HEAD", "GET", "POST", "PUT", "DELETE"); + final Settings settings = Settings.builder() + .put(SETTING_CORS_ENABLED.getKey(), true) + .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), "*") + .put(SETTING_CORS_ALLOW_CREDENTIALS.getKey(), true) + .build(); + final Netty3HttpServerTransport transport = new Netty3HttpServerTransport(settings, networkService, bigArrays, threadPool); + final Netty3CorsConfig corsConfig = transport.getCorsConfig(); + 
assertThat(corsConfig.allowedRequestHeaders(), equalTo(headers)); + assertThat(corsConfig.allowedRequestMethods().stream().map(HttpMethod::getName).collect(Collectors.toSet()), equalTo(methods)); + transport.close(); + } } From 4cb8b620c37decb69f31c92d08de765db1ec828e Mon Sep 17 00:00:00 2001 From: Michael Nitschinger Date: Fri, 22 Jul 2016 18:33:21 +0200 Subject: [PATCH 38/93] Allow to listen on virtual interfaces Previously when trying to listen on virtual interfaces during bootstrap the application would stop working - the interface couldn't be found by the NetworkUtils class. The NetworkUtils utilize the underlying JDK NetworkInterface class which, when asked to lookup by name only takes physical interfaces into account, failing at virtual (or subinterfaces) ones (returning null). Note that when iterating over all interfaces, both physical and virtual ones are taken into account. This changeset asks for all known interfaces, iterates over them and matches on the given name as part of the loop, allowing it to catch both physical and virtual interfaces. As a result, elasticsearch can now also serve on virtual interfaces. A test case has been added which at least makes sure that all iterable interfaces can be found by their respective name. (It's not easily possible in a unit test to "fake" virtual interfaces).
Relates #19537 --- .../common/network/NetworkUtils.java | 9 ++++++++- .../common/network/NetworkUtilsTests.java | 17 +++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkUtils.java b/core/src/main/java/org/elasticsearch/common/network/NetworkUtils.java index 8652d4c5c05..e15073e25ce 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkUtils.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkUtils.java @@ -227,7 +227,14 @@ public abstract class NetworkUtils { /** Returns addresses for the given interface (it must be marked up) */ static InetAddress[] getAddressesForInterface(String name) throws SocketException { - NetworkInterface intf = NetworkInterface.getByName(name); + NetworkInterface intf = null; + for (NetworkInterface networkInterface : getInterfaces()) { + if (name.equals(networkInterface.getName())) { + intf = networkInterface; + break; + } + } + if (intf == null) { throw new IllegalArgumentException("No interface named '" + name + "' found, got " + getInterfaces()); } diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkUtilsTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkUtilsTests.java index e5b95f258a3..ce76f8ef6d3 100644 --- a/core/src/test/java/org/elasticsearch/common/network/NetworkUtilsTests.java +++ b/core/src/test/java/org/elasticsearch/common/network/NetworkUtilsTests.java @@ -22,6 +22,10 @@ package org.elasticsearch.common.network; import org.elasticsearch.test.ESTestCase; import java.net.InetAddress; +import java.net.NetworkInterface; +import java.util.Arrays; +import java.util.Collections; +import java.util.Enumeration; /** * Tests for network utils. Please avoid using any methods that cause DNS lookups! 
@@ -74,4 +78,17 @@ public class NetworkUtilsTests extends ESTestCase { assertArrayEquals(new InetAddress[] { InetAddress.getByName("127.0.0.1") }, NetworkUtils.filterIPV4(addresses)); assertArrayEquals(new InetAddress[] { InetAddress.getByName("::1") }, NetworkUtils.filterIPV6(addresses)); } + + /** + * Test that selecting by name is possible and properly matches the addresses on all interfaces and virtual + * interfaces. + */ + public void testAddressInterfaceLookup() throws Exception { + for (NetworkInterface netIf : NetworkUtils.getInterfaces()) { + String name = netIf.getName(); + InetAddress[] expectedAddresses = Collections.list(netIf.getInetAddresses()).toArray(new InetAddress[0]); + InetAddress[] foundAddresses = NetworkUtils.getAddressesForInterface(name); + assertArrayEquals(expectedAddresses, foundAddresses); + } + } } From e6054a931e94d78684e118f415fc506d5514de88 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 22 Jul 2016 19:01:56 +0200 Subject: [PATCH 39/93] add async request unit test --- .../client/RestClientIntegTests.java | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java index ceaccad846d..55f96bce508 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java @@ -48,11 +48,14 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import static org.elasticsearch.client.RestClientTestUtil.getAllStatusCodes; import static org.elasticsearch.client.RestClientTestUtil.getHttpMethods; import static org.elasticsearch.client.RestClientTestUtil.randomStatusCode; import static org.hamcrest.CoreMatchers.equalTo; +import static 
org.hamcrest.CoreMatchers.instanceOf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; @@ -215,4 +218,34 @@ public class RestClientIntegTests extends RestClientTestCase { assertEquals(statusCode, esResponse.getStatusLine().getStatusCode()); assertEquals(requestBody, EntityUtils.toString(esResponse.getEntity())); } + + public void testAsyncRequests() throws Exception { + int numRequests = randomIntBetween(2, 10); + final CountDownLatch latch = new CountDownLatch(numRequests); + for (int i = 0; i < numRequests; i++) { + final String method = RestClientTestUtil.randomHttpMethod(getRandom()); + final int statusCode = randomStatusCode(getRandom()); + restClient.performRequest(method, "/" + statusCode, new ResponseListener() { + @Override + public void onSuccess(Response response) { + latch.countDown(); + assertResponse(response); + } + + @Override + public void onFailure(Exception exception) { + latch.countDown(); + assertThat(exception, instanceOf(ResponseException.class)); + ResponseException responseException = (ResponseException) exception; + assertResponse(responseException.getResponse()); + } + + private void assertResponse(Response response) { + assertEquals(method, response.getRequestLine().getMethod()); + assertEquals(statusCode, response.getStatusLine().getStatusCode()); + } + }); + } + assertTrue(latch.await(5, TimeUnit.SECONDS)); + } } From c27237be9fc70b077164e22705a17d25ac1e5d9f Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 22 Jul 2016 13:30:05 -0400 Subject: [PATCH 40/93] Revert "Allow to listen on virtual interfaces" This reverts commit 4cb8b620c37decb69f31c92d08de765db1ec828e. 
--- .../common/network/NetworkUtils.java | 9 +-------- .../common/network/NetworkUtilsTests.java | 17 ----------------- 2 files changed, 1 insertion(+), 25 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkUtils.java b/core/src/main/java/org/elasticsearch/common/network/NetworkUtils.java index e15073e25ce..8652d4c5c05 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkUtils.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkUtils.java @@ -227,14 +227,7 @@ public abstract class NetworkUtils { /** Returns addresses for the given interface (it must be marked up) */ static InetAddress[] getAddressesForInterface(String name) throws SocketException { - NetworkInterface intf = null; - for (NetworkInterface networkInterface : getInterfaces()) { - if (name.equals(networkInterface.getName())) { - intf = networkInterface; - break; - } - } - + NetworkInterface intf = NetworkInterface.getByName(name); if (intf == null) { throw new IllegalArgumentException("No interface named '" + name + "' found, got " + getInterfaces()); } diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkUtilsTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkUtilsTests.java index ce76f8ef6d3..e5b95f258a3 100644 --- a/core/src/test/java/org/elasticsearch/common/network/NetworkUtilsTests.java +++ b/core/src/test/java/org/elasticsearch/common/network/NetworkUtilsTests.java @@ -22,10 +22,6 @@ package org.elasticsearch.common.network; import org.elasticsearch.test.ESTestCase; import java.net.InetAddress; -import java.net.NetworkInterface; -import java.util.Arrays; -import java.util.Collections; -import java.util.Enumeration; /** * Tests for network utils. Please avoid using any methods that cause DNS lookups! 
@@ -78,17 +74,4 @@ public class NetworkUtilsTests extends ESTestCase { assertArrayEquals(new InetAddress[] { InetAddress.getByName("127.0.0.1") }, NetworkUtils.filterIPV4(addresses)); assertArrayEquals(new InetAddress[] { InetAddress.getByName("::1") }, NetworkUtils.filterIPV6(addresses)); } - - /** - * Test that selecting by name is possible and properly matches the addresses on all interfaces and virtual - * interfaces. - */ - public void testAddressInterfaceLookup() throws Exception { - for (NetworkInterface netIf : NetworkUtils.getInterfaces()) { - String name = netIf.getName(); - InetAddress[] expectedAddresses = Collections.list(netIf.getInetAddresses()).toArray(new InetAddress[0]); - InetAddress[] foundAddresses = NetworkUtils.getAddressesForInterface(name); - assertArrayEquals(expectedAddresses, foundAddresses); - } - } } From 46cb3f36ffa9b94353ed70c2c14b2a107d342f9e Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 22 Jul 2016 19:03:45 +0200 Subject: [PATCH 41/93] fix concurrency bug when getting the host for a given request It can happen that the list of healthy hosts is empty, then we get one from the blacklist. but some other operation might have sneaked in and emptied the blacklist in the meantime, so we have to retry till we manage to get some host, either from the healthy list or from the blacklist. 
--- .../org/elasticsearch/client/RestClient.java | 52 +++++++++++-------- 1 file changed, 29 insertions(+), 23 deletions(-) diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index f34ec4e2b85..9428b53085c 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -46,6 +46,7 @@ import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; @@ -373,30 +374,35 @@ public final class RestClient implements Closeable { * In case there are no healthy hosts available, or dead ones to be be retried, one dead host gets returned. */ private Iterable nextHost() { - Set filteredHosts = new HashSet<>(hosts); - for (Map.Entry entry : blacklist.entrySet()) { - if (System.nanoTime() - entry.getValue().getDeadUntilNanos() < 0) { - filteredHosts.remove(entry.getKey()); - } - } - - if (filteredHosts.isEmpty()) { - //last resort: if there are no good hosts to use, return a single dead one, the one that's closest to being retried - List> sortedHosts = new ArrayList<>(blacklist.entrySet()); - Collections.sort(sortedHosts, new Comparator>() { - @Override - public int compare(Map.Entry o1, Map.Entry o2) { - return Long.compare(o1.getValue().getDeadUntilNanos(), o2.getValue().getDeadUntilNanos()); + Collection nextHosts = Collections.emptySet(); + do { + Set filteredHosts = new HashSet<>(hosts); + for (Map.Entry entry : blacklist.entrySet()) { + if (System.nanoTime() - entry.getValue().getDeadUntilNanos() < 0) { + filteredHosts.remove(entry.getKey()); } - }); - HttpHost deadHost = sortedHosts.get(0).getKey(); - logger.trace("resurrecting host [" + deadHost + "]"); - return Collections.singleton(deadHost); - } - - List rotatedHosts = new 
ArrayList<>(filteredHosts); - Collections.rotate(rotatedHosts, rotatedHosts.size() - lastHostIndex.getAndIncrement()); - return rotatedHosts; + } + if (filteredHosts.isEmpty()) { + //last resort: if there are no good hosts to use, return a single dead one, the one that's closest to being retried + List> sortedHosts = new ArrayList<>(blacklist.entrySet()); + if (sortedHosts.size() > 0) { + Collections.sort(sortedHosts, new Comparator>() { + @Override + public int compare(Map.Entry o1, Map.Entry o2) { + return Long.compare(o1.getValue().getDeadUntilNanos(), o2.getValue().getDeadUntilNanos()); + } + }); + HttpHost deadHost = sortedHosts.get(0).getKey(); + logger.trace("resurrecting host [" + deadHost + "]"); + nextHosts = Collections.singleton(deadHost); + } + } else { + List rotatedHosts = new ArrayList<>(filteredHosts); + Collections.rotate(rotatedHosts, rotatedHosts.size() - lastHostIndex.getAndIncrement()); + nextHosts = rotatedHosts; + } + } while(nextHosts.isEmpty()); + return nextHosts; } /** From c9c7af791c2bae98cda969f3a8e80c5f3882b195 Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 22 Jul 2016 21:42:42 +0200 Subject: [PATCH 42/93] update nextHost method javadocs --- .../java/org/elasticsearch/client/RestClient.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index 9428b53085c..84863580bb1 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -366,12 +366,12 @@ public final class RestClient implements Closeable { } /** - * Returns an iterator of hosts to be used for a request call. - * Ideally, the first host is retrieved from the iterator and used successfully for the request. 
- * Otherwise, after each failure the next host should be retrieved from the iterator so that the request can be retried until - * the iterator is exhausted. The maximum total of attempts is equal to the number of hosts that are available in the iterator. - * The iterator returned will never be empty, rather an {@link IllegalStateException} in case there are no hosts. - * In case there are no healthy hosts available, or dead ones to be be retried, one dead host gets returned. + * Returns an {@link Iterable} of hosts to be used for a request call. + * Ideally, the first host is retrieved from the iterable and used successfully for the request. + * Otherwise, after each failure the next host has to be retrieved from the iterator so that the request can be retried until + * there are no more hosts available to retry against. The maximum total of attempts is equal to the number of hosts in the iterable. + * The iterator returned will never be empty. In case there are no healthy hosts available, or dead ones to be retried, + * one dead host gets returned so that it can be retried. 
*/ private Iterable nextHost() { Collection nextHosts = Collections.emptySet(); From 061ea1bd8ce5095d974b8ba13c32d61b772fc7cc Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 22 Jul 2016 22:19:59 +0200 Subject: [PATCH 43/93] [TEST] move assertions outside of listener in testAsyncRequests for clearer test failures --- .../client/RestClientIntegTests.java | 45 ++++++++++++++----- 1 file changed, 34 insertions(+), 11 deletions(-) diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java index 55f96bce508..455affea9d5 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientIntegTests.java @@ -48,6 +48,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -55,7 +56,6 @@ import static org.elasticsearch.client.RestClientTestUtil.getAllStatusCodes; import static org.elasticsearch.client.RestClientTestUtil.getHttpMethods; import static org.elasticsearch.client.RestClientTestUtil.randomStatusCode; import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.instanceOf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; @@ -220,32 +220,55 @@ public class RestClientIntegTests extends RestClientTestCase { } public void testAsyncRequests() throws Exception { - int numRequests = randomIntBetween(2, 10); + int numRequests = randomIntBetween(5, 20); final CountDownLatch latch = new CountDownLatch(numRequests); + final List responses = new CopyOnWriteArrayList<>(); for (int i = 0; i < numRequests; i++) { final String method = RestClientTestUtil.randomHttpMethod(getRandom()); final int 
statusCode = randomStatusCode(getRandom()); restClient.performRequest(method, "/" + statusCode, new ResponseListener() { @Override public void onSuccess(Response response) { + responses.add(new TestResponse(method, statusCode, response)); latch.countDown(); - assertResponse(response); } @Override public void onFailure(Exception exception) { + responses.add(new TestResponse(method, statusCode, exception)); latch.countDown(); - assertThat(exception, instanceOf(ResponseException.class)); - ResponseException responseException = (ResponseException) exception; - assertResponse(responseException.getResponse()); - } - - private void assertResponse(Response response) { - assertEquals(method, response.getRequestLine().getMethod()); - assertEquals(statusCode, response.getStatusLine().getStatusCode()); } }); } assertTrue(latch.await(5, TimeUnit.SECONDS)); + + assertEquals(numRequests, responses.size()); + for (TestResponse response : responses) { + assertEquals(response.method, response.getResponse().getRequestLine().getMethod()); + assertEquals(response.statusCode, response.getResponse().getStatusLine().getStatusCode()); + + } + } + + private static class TestResponse { + private final String method; + private final int statusCode; + private final Object response; + + TestResponse(String method, int statusCode, Object response) { + this.method = method; + this.statusCode = statusCode; + this.response = response; + } + + Response getResponse() { + if (response instanceof Response) { + return (Response) response; + } + if (response instanceof ResponseException) { + return ((ResponseException) response).getResponse(); + } + throw new AssertionError("unexpected response " + response.getClass()); + } } } From 0216cef7bd51ba82b3bae94883399a529a19de9e Mon Sep 17 00:00:00 2001 From: Alexander Kazakov Date: Tue, 31 May 2016 18:01:49 +0200 Subject: [PATCH 44/93] Fix EC2 discovery setting Closes #18652 --- .../java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java index 649f84b7aed..5c7e6653f52 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java @@ -114,7 +114,7 @@ public class AwsEc2ServiceImpl extends AbstractLifecycleComponent implements Aws AWSCredentialsProvider credentials; - if (key == null && secret == null) { + if (key.isEmpty() && secret.isEmpty()) { credentials = new AWSCredentialsProviderChain( new EnvironmentVariableCredentialsProvider(), new SystemPropertiesCredentialsProvider(), From 0578925423da5bebfdaeebcf8483facf53ab579f Mon Sep 17 00:00:00 2001 From: David Pilato Date: Wed, 1 Jun 2016 22:44:39 +0200 Subject: [PATCH 45/93] Fix ec2 settings Follow up for #18662 We add some tests to check that settings are correctly applied. Tests revealed that some checks were missing. But we ignore `testAWSCredentialsWithSystemProviders` test for now. --- plugins/discovery-ec2/build.gradle | 6 +- .../cloud/aws/AwsEc2ServiceImpl.java | 171 +++++++++++------- .../cloud/aws/AwsEc2ServiceImplTests.java | 169 +++++++++++++++++ 3 files changed, 279 insertions(+), 67 deletions(-) create mode 100644 plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImplTests.java diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index 15cce1aa32b..a31dcde20a0 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -44,7 +44,11 @@ dependencyLicenses { test { // this is needed for insecure plugins, remove if possible! 
- systemProperty 'tests.artifact', project.name + systemProperty 'tests.artifact', project.name + // this could be needed by AwsEc2ServiceImplTests#testAWSCredentialsWithSystemProviders() + // As it's marked as Ignored for now, we can comment those + // systemProperty 'aws.accessKeyId', 'DUMMY_ACCESS_KEY' + // systemProperty 'aws.secretKey', 'DUMMY_SECRET_KEY' } thirdPartyAudit.excludes = [ diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java index 5c7e6653f52..b51c9c03f6e 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; @@ -66,15 +67,45 @@ public class AwsEc2ServiceImpl extends AbstractLifecycleComponent implements Aws return client; } + this.client = new AmazonEC2Client(buildCredentials(logger, settings), buildConfiguration(logger, settings)); + String endpoint = findEndpoint(logger, settings); + if (endpoint != null) { + client.setEndpoint(endpoint); + } + + return this.client; + } + + protected static AWSCredentialsProvider buildCredentials(ESLogger logger, Settings settings) { + AWSCredentialsProvider credentials; + + String key = CLOUD_EC2.KEY_SETTING.get(settings); + String secret = CLOUD_EC2.SECRET_SETTING.get(settings); + if (key.isEmpty() && secret.isEmpty()) { + logger.debug("Using either environment variables, system properties or instance profile credentials"); + credentials = new 
AWSCredentialsProviderChain( + new EnvironmentVariableCredentialsProvider(), + new SystemPropertiesCredentialsProvider(), + new InstanceProfileCredentialsProvider() + ); + } else { + logger.debug("Using basic key/secret credentials"); + credentials = new AWSCredentialsProviderChain( + new StaticCredentialsProvider(new BasicAWSCredentials(key, secret)) + ); + } + + return credentials; + } + + protected static ClientConfiguration buildConfiguration(ESLogger logger, Settings settings) { ClientConfiguration clientConfiguration = new ClientConfiguration(); // the response metadata cache is only there for diagnostics purposes, // but can force objects from every response to the old generation. clientConfiguration.setResponseMetadataCacheSize(0); clientConfiguration.setProtocol(CLOUD_EC2.PROTOCOL_SETTING.get(settings)); - String key = CLOUD_EC2.KEY_SETTING.get(settings); - String secret = CLOUD_EC2.SECRET_SETTING.get(settings); - if (CLOUD_EC2.PROXY_HOST_SETTING.exists(settings)) { + if (PROXY_HOST_SETTING.exists(settings) || CLOUD_EC2.PROXY_HOST_SETTING.exists(settings)) { String proxyHost = CLOUD_EC2.PROXY_HOST_SETTING.get(settings); Integer proxyPort = CLOUD_EC2.PROXY_PORT_SETTING.get(settings); String proxyUsername = CLOUD_EC2.PROXY_USERNAME_SETTING.get(settings); @@ -97,78 +128,86 @@ public class AwsEc2ServiceImpl extends AbstractLifecycleComponent implements Aws // Increase the number of retries in case of 5xx API responses final Random rand = Randomness.get(); RetryPolicy retryPolicy = new RetryPolicy( - RetryPolicy.RetryCondition.NO_RETRY_CONDITION, - new RetryPolicy.BackoffStrategy() { - @Override - public long delayBeforeNextRetry(AmazonWebServiceRequest originalRequest, - AmazonClientException exception, - int retriesAttempted) { - // with 10 retries the max delay time is 320s/320000ms (10 * 2^5 * 1 * 1000) - logger.warn("EC2 API request failed, retry again. 
Reason was:", exception); - return 1000L * (long) (10d * Math.pow(2, retriesAttempted / 2.0d) * (1.0d + rand.nextDouble())); - } - }, - 10, - false); + RetryPolicy.RetryCondition.NO_RETRY_CONDITION, + new RetryPolicy.BackoffStrategy() { + @Override + public long delayBeforeNextRetry(AmazonWebServiceRequest originalRequest, + AmazonClientException exception, + int retriesAttempted) { + // with 10 retries the max delay time is 320s/320000ms (10 * 2^5 * 1 * 1000) + logger.warn("EC2 API request failed, retry again. Reason was:", exception); + return 1000L * (long) (10d * Math.pow(2, retriesAttempted / 2.0d) * (1.0d + rand.nextDouble())); + } + }, + 10, + false); clientConfiguration.setRetryPolicy(retryPolicy); - AWSCredentialsProvider credentials; - - if (key.isEmpty() && secret.isEmpty()) { - credentials = new AWSCredentialsProviderChain( - new EnvironmentVariableCredentialsProvider(), - new SystemPropertiesCredentialsProvider(), - new InstanceProfileCredentialsProvider() - ); - } else { - credentials = new AWSCredentialsProviderChain( - new StaticCredentialsProvider(new BasicAWSCredentials(key, secret)) - ); - } - - this.client = new AmazonEC2Client(credentials, clientConfiguration); + return clientConfiguration; + } + protected static String findEndpoint(ESLogger logger, Settings settings) { + String endpoint = null; if (CLOUD_EC2.ENDPOINT_SETTING.exists(settings)) { - final String endpoint = CLOUD_EC2.ENDPOINT_SETTING.get(settings); + endpoint = CLOUD_EC2.ENDPOINT_SETTING.get(settings); logger.debug("using explicit ec2 endpoint [{}]", endpoint); - client.setEndpoint(endpoint); - } else if (CLOUD_EC2.REGION_SETTING.exists(settings)) { + } else if (REGION_SETTING.exists(settings) || CLOUD_EC2.REGION_SETTING.exists(settings)) { final String region = CLOUD_EC2.REGION_SETTING.get(settings); - final String endpoint; - if (region.equals("us-east-1") || region.equals("us-east")) { - endpoint = "ec2.us-east-1.amazonaws.com"; - } else if (region.equals("us-west") || 
region.equals("us-west-1")) { - endpoint = "ec2.us-west-1.amazonaws.com"; - } else if (region.equals("us-west-2")) { - endpoint = "ec2.us-west-2.amazonaws.com"; - } else if (region.equals("ap-southeast") || region.equals("ap-southeast-1")) { - endpoint = "ec2.ap-southeast-1.amazonaws.com"; - } else if (region.equals("us-gov-west") || region.equals("us-gov-west-1")) { - endpoint = "ec2.us-gov-west-1.amazonaws.com"; - } else if (region.equals("ap-south-1")) { - endpoint = "ec2.ap-south-1.amazonaws.com"; - } else if (region.equals("ap-southeast-2")) { - endpoint = "ec2.ap-southeast-2.amazonaws.com"; - } else if (region.equals("ap-northeast") || region.equals("ap-northeast-1")) { - endpoint = "ec2.ap-northeast-1.amazonaws.com"; - } else if (region.equals("ap-northeast-2")) { - endpoint = "ec2.ap-northeast-2.amazonaws.com"; - } else if (region.equals("eu-west") || region.equals("eu-west-1")) { - endpoint = "ec2.eu-west-1.amazonaws.com"; - } else if (region.equals("eu-central") || region.equals("eu-central-1")) { - endpoint = "ec2.eu-central-1.amazonaws.com"; - } else if (region.equals("sa-east") || region.equals("sa-east-1")) { - endpoint = "ec2.sa-east-1.amazonaws.com"; - } else if (region.equals("cn-north") || region.equals("cn-north-1")) { - endpoint = "ec2.cn-north-1.amazonaws.com.cn"; - } else { - throw new IllegalArgumentException("No automatic endpoint could be derived from region [" + region + "]"); + switch (region) { + case "us-east-1": + case "us-east": + endpoint = "ec2.us-east-1.amazonaws.com"; + break; + case "us-west": + case "us-west-1": + endpoint = "ec2.us-west-1.amazonaws.com"; + break; + case "us-west-2": + endpoint = "ec2.us-west-2.amazonaws.com"; + break; + case "ap-southeast": + case "ap-southeast-1": + endpoint = "ec2.ap-southeast-1.amazonaws.com"; + break; + case "ap-south-1": + endpoint = "ec2.ap-south-1.amazonaws.com"; + break; + case "us-gov-west": + case "us-gov-west-1": + endpoint = "ec2.us-gov-west-1.amazonaws.com"; + break; + case 
"ap-southeast-2": + endpoint = "ec2.ap-southeast-2.amazonaws.com"; + break; + case "ap-northeast": + case "ap-northeast-1": + endpoint = "ec2.ap-northeast-1.amazonaws.com"; + break; + case "ap-northeast-2": + endpoint = "ec2.ap-northeast-2.amazonaws.com"; + break; + case "eu-west": + case "eu-west-1": + endpoint = "ec2.eu-west-1.amazonaws.com"; + break; + case "eu-central": + case "eu-central-1": + endpoint = "ec2.eu-central-1.amazonaws.com"; + break; + case "sa-east": + case "sa-east-1": + endpoint = "ec2.sa-east-1.amazonaws.com"; + break; + case "cn-north": + case "cn-north-1": + endpoint = "ec2.cn-north-1.amazonaws.com.cn"; + break; + default: + throw new IllegalArgumentException("No automatic endpoint could be derived from region [" + region + "]"); } logger.debug("using ec2 region [{}], with endpoint [{}]", region, endpoint); - client.setEndpoint(endpoint); } - - return this.client; + return endpoint; } @Override diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImplTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImplTests.java new file mode 100644 index 00000000000..7ee82516926 --- /dev/null +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImplTests.java @@ -0,0 +1,169 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cloud.aws; + +import com.amazonaws.ClientConfiguration; +import com.amazonaws.Protocol; +import com.amazonaws.auth.AWSCredentials; +import com.amazonaws.auth.AWSCredentialsProvider; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +public class AwsEc2ServiceImplTests extends ESTestCase { + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/19556") + public void testAWSCredentialsWithSystemProviders() { + AWSCredentialsProvider credentialsProvider = AwsEc2ServiceImpl.buildCredentials(logger, Settings.EMPTY); + + AWSCredentials credentials = credentialsProvider.getCredentials(); + assertThat(credentials.getAWSAccessKeyId(), is("DUMMY_ACCESS_KEY")); + assertThat(credentials.getAWSSecretKey(), is("DUMMY_SECRET_KEY")); + } + + public void testAWSCredentialsWithElasticsearchAwsSettings() { + Settings settings = Settings.builder() + .put(AwsEc2Service.KEY_SETTING.getKey(), "aws_key") + .put(AwsEc2Service.SECRET_SETTING.getKey(), "aws_secret") + .build(); + launchAWSCredentialsWithElasticsearchSettingsTest(settings, "aws_key", "aws_secret"); + } + + public void testAWSCredentialsWithElasticsearchEc2Settings() { + Settings settings = Settings.builder() + .put(AwsEc2Service.CLOUD_EC2.KEY_SETTING.getKey(), "ec2_key") + .put(AwsEc2Service.CLOUD_EC2.SECRET_SETTING.getKey(), "ec2_secret") + .build(); + 
launchAWSCredentialsWithElasticsearchSettingsTest(settings, "ec2_key", "ec2_secret"); + } + + public void testAWSCredentialsWithElasticsearchAwsAndEc2Settings() { + Settings settings = Settings.builder() + .put(AwsEc2Service.KEY_SETTING.getKey(), "aws_key") + .put(AwsEc2Service.SECRET_SETTING.getKey(), "aws_secret") + .put(AwsEc2Service.CLOUD_EC2.KEY_SETTING.getKey(), "ec2_key") + .put(AwsEc2Service.CLOUD_EC2.SECRET_SETTING.getKey(), "ec2_secret") + .build(); + launchAWSCredentialsWithElasticsearchSettingsTest(settings, "ec2_key", "ec2_secret"); + } + + protected void launchAWSCredentialsWithElasticsearchSettingsTest(Settings settings, String expectedKey, String expectedSecret) { + AWSCredentials credentials = AwsEc2ServiceImpl.buildCredentials(logger, settings).getCredentials(); + assertThat(credentials.getAWSAccessKeyId(), is(expectedKey)); + assertThat(credentials.getAWSSecretKey(), is(expectedSecret)); + } + + public void testAWSDefaultConfiguration() { + launchAWSConfigurationTest(Settings.EMPTY, Protocol.HTTPS, null, -1, null, null, null); + } + + public void testAWSConfigurationWithAwsSettings() { + Settings settings = Settings.builder() + .put(AwsEc2Service.PROTOCOL_SETTING.getKey(), "http") + .put(AwsEc2Service.PROXY_HOST_SETTING.getKey(), "aws_proxy_host") + .put(AwsEc2Service.PROXY_PORT_SETTING.getKey(), 8080) + .put(AwsEc2Service.PROXY_USERNAME_SETTING.getKey(), "aws_proxy_username") + .put(AwsEc2Service.PROXY_PASSWORD_SETTING.getKey(), "aws_proxy_password") + .put(AwsEc2Service.SIGNER_SETTING.getKey(), "AWS3SignerType") + .build(); + launchAWSConfigurationTest(settings, Protocol.HTTP, "aws_proxy_host", 8080, "aws_proxy_username", "aws_proxy_password", + "AWS3SignerType"); + } + + public void testAWSConfigurationWithAwsAndEc2Settings() { + Settings settings = Settings.builder() + .put(AwsEc2Service.PROTOCOL_SETTING.getKey(), "http") + .put(AwsEc2Service.PROXY_HOST_SETTING.getKey(), "aws_proxy_host") + .put(AwsEc2Service.PROXY_PORT_SETTING.getKey(), 
8080) + .put(AwsEc2Service.PROXY_USERNAME_SETTING.getKey(), "aws_proxy_username") + .put(AwsEc2Service.PROXY_PASSWORD_SETTING.getKey(), "aws_proxy_password") + .put(AwsEc2Service.SIGNER_SETTING.getKey(), "AWS3SignerType") + .put(AwsEc2Service.CLOUD_EC2.PROTOCOL_SETTING.getKey(), "https") + .put(AwsEc2Service.CLOUD_EC2.PROXY_HOST_SETTING.getKey(), "ec2_proxy_host") + .put(AwsEc2Service.CLOUD_EC2.PROXY_PORT_SETTING.getKey(), 8081) + .put(AwsEc2Service.CLOUD_EC2.PROXY_USERNAME_SETTING.getKey(), "ec2_proxy_username") + .put(AwsEc2Service.CLOUD_EC2.PROXY_PASSWORD_SETTING.getKey(), "ec2_proxy_password") + .put(AwsEc2Service.CLOUD_EC2.SIGNER_SETTING.getKey(), "NoOpSignerType") + .build(); + launchAWSConfigurationTest(settings, Protocol.HTTPS, "ec2_proxy_host", 8081, "ec2_proxy_username", "ec2_proxy_password", + "NoOpSignerType"); + } + + protected void launchAWSConfigurationTest(Settings settings, + Protocol expectedProtocol, + String expectedProxyHost, + int expectedProxyPort, + String expectedProxyUsername, + String expectedProxyPassword, + String expectedSigner) { + ClientConfiguration configuration = AwsEc2ServiceImpl.buildConfiguration(logger, settings); + + assertThat(configuration.getResponseMetadataCacheSize(), is(0)); + assertThat(configuration.getProtocol(), is(expectedProtocol)); + assertThat(configuration.getProxyHost(), is(expectedProxyHost)); + assertThat(configuration.getProxyPort(), is(expectedProxyPort)); + assertThat(configuration.getProxyUsername(), is(expectedProxyUsername)); + assertThat(configuration.getProxyPassword(), is(expectedProxyPassword)); + assertThat(configuration.getSignerOverride(), is(expectedSigner)); + } + + public void testDefaultEndpoint() { + String endpoint = AwsEc2ServiceImpl.findEndpoint(logger, Settings.EMPTY); + assertThat(endpoint, nullValue()); + } + + public void testSpecificEndpoint() { + Settings settings = Settings.builder() + .put(AwsEc2Service.CLOUD_EC2.ENDPOINT_SETTING.getKey(), "ec2.endpoint") + .build(); + String 
endpoint = AwsEc2ServiceImpl.findEndpoint(logger, settings); + assertThat(endpoint, is("ec2.endpoint")); + } + + public void testRegionWithAwsSettings() { + Settings settings = Settings.builder() + .put(AwsEc2Service.REGION_SETTING.getKey(), randomFrom("eu-west", "eu-west-1")) + .build(); + String endpoint = AwsEc2ServiceImpl.findEndpoint(logger, settings); + assertThat(endpoint, is("ec2.eu-west-1.amazonaws.com")); + } + + public void testRegionWithAwsAndEc2Settings() { + Settings settings = Settings.builder() + .put(AwsEc2Service.REGION_SETTING.getKey(), randomFrom("eu-west", "eu-west-1")) + .put(AwsEc2Service.CLOUD_EC2.REGION_SETTING.getKey(), randomFrom("us-west", "us-west-1")) + .build(); + String endpoint = AwsEc2ServiceImpl.findEndpoint(logger, settings); + assertThat(endpoint, is("ec2.us-west-1.amazonaws.com")); + } + + public void testInvalidRegion() { + Settings settings = Settings.builder() + .put(AwsEc2Service.REGION_SETTING.getKey(), "does-not-exist") + .build(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { + AwsEc2ServiceImpl.findEndpoint(logger, settings); + }); + assertThat(e.getMessage(), containsString("No automatic endpoint could be derived from region")); + } +} From 7aa4568a9c976ab9c69aefbaa61e9c809449e2e4 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Thu, 2 Jun 2016 08:53:35 +0200 Subject: [PATCH 46/93] Fix s3 settings Follow up for #18662 and #18690. * For consistency, we rename method parameters and use `key` and `secret` instead of `account` and `key`. * We add some tests to check that settings are correctly applied. * Tests revealed that some checks are bad like for #18662. 
Add test and fix issue for getting the right S3 endpoint Test when Repository, Repositories or global settings are defined But ignore testAWSCredentialsWithSystemProviders test Add tests for AWS Client Configuration Fix NPE when no region is set We used to transform region="" to region=null but it's not needed anymore and would actually cause NPE from now. --- plugins/repository-s3/build.gradle | 6 +- .../cloud/aws/InternalAwsS3Service.java | 115 ++++--- .../repositories/s3/S3Repository.java | 5 - .../cloud/aws/AwsS3ServiceImplTests.java | 313 ++++++++++++++++++ 4 files changed, 381 insertions(+), 58 deletions(-) create mode 100644 plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AwsS3ServiceImplTests.java diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index f713d8eadc5..23aa68e7f2d 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -50,7 +50,11 @@ dependencyLicenses { test { // this is needed for insecure plugins, remove if possible! 
- systemProperty 'tests.artifact', project.name + systemProperty 'tests.artifact', project.name + // this could be needed by AwsS3ServiceImplTests#testAWSCredentialsWithSystemProviders() + // As it's marked as Ignored for now, we can comment those + // systemProperty 'aws.accessKeyId', 'DUMMY_ACCESS_KEY' + // systemProperty 'aws.secretKey', 'DUMMY_SECRET_KEY' } thirdPartyAudit.excludes = [ diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java index 36d383d865c..e1bce876c27 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import java.util.HashMap; @@ -57,30 +58,33 @@ public class InternalAwsS3Service extends AbstractLifecycleComponent implements } @Override - public synchronized AmazonS3 client(String endpoint, Protocol protocol, String region, String account, String key, Integer maxRetries, + public synchronized AmazonS3 client(String endpoint, Protocol protocol, String region, String key, String secret, Integer maxRetries, boolean useThrottleRetries, Boolean pathStyleAccess) { - if (Strings.isNullOrEmpty(endpoint)) { - // We need to set the endpoint based on the region - if (region != null) { - endpoint = getEndpoint(region); - logger.debug("using s3 region [{}], with endpoint [{}]", region, endpoint); - } else { - // No region has been set so we will use the default endpoint - endpoint = getDefaultEndpoint(); - } - } - - return getClient(endpoint, protocol, 
account, key, maxRetries, useThrottleRetries, pathStyleAccess); - } - - private synchronized AmazonS3 getClient(String endpoint, Protocol protocol, String account, String key, Integer maxRetries, - boolean useThrottleRetries, Boolean pathStyleAccess) { - Tuple clientDescriptor = new Tuple<>(endpoint, account); + String foundEndpoint = findEndpoint(logger, settings, endpoint, region); + Tuple clientDescriptor = new Tuple<>(foundEndpoint, key); AmazonS3Client client = clients.get(clientDescriptor); if (client != null) { return client; } + client = new AmazonS3Client( + buildCredentials(logger, key, secret), + buildConfiguration(logger, settings, protocol, maxRetries, foundEndpoint, useThrottleRetries)); + + if (pathStyleAccess != null) { + client.setS3ClientOptions(new S3ClientOptions().withPathStyleAccess(pathStyleAccess)); + } + + if (!foundEndpoint.isEmpty()) { + client.setEndpoint(foundEndpoint); + } + + clients.put(clientDescriptor, client); + return client; + } + + public static ClientConfiguration buildConfiguration(ESLogger logger, Settings settings, Protocol protocol, Integer maxRetries, + String endpoint, boolean useThrottleRetries) { ClientConfiguration clientConfiguration = new ClientConfiguration(); // the response metadata cache is only there for diagnostics purposes, // but can force objects from every response to the old generation. 
@@ -113,43 +117,50 @@ public class InternalAwsS3Service extends AbstractLifecycleComponent implements AwsSigner.configureSigner(awsSigner, clientConfiguration, endpoint); } - AWSCredentialsProvider credentials; - - if (account == null && key == null) { - credentials = new AWSCredentialsProviderChain( - new EnvironmentVariableCredentialsProvider(), - new SystemPropertiesCredentialsProvider(), - new InstanceProfileCredentialsProvider() - ); - } else { - credentials = new AWSCredentialsProviderChain( - new StaticCredentialsProvider(new BasicAWSCredentials(account, key)) - ); - } - client = new AmazonS3Client(credentials, clientConfiguration); - - if (endpoint != null) { - client.setEndpoint(endpoint); - } - - if (pathStyleAccess != null) { - client.setS3ClientOptions(new S3ClientOptions().withPathStyleAccess(pathStyleAccess)); - } - - clients.put(clientDescriptor, client); - return client; + return clientConfiguration; } - private String getDefaultEndpoint() { - String endpoint = null; - if (CLOUD_S3.ENDPOINT_SETTING.exists(settings)) { - endpoint = CLOUD_S3.ENDPOINT_SETTING.get(settings); - logger.debug("using explicit s3 endpoint [{}]", endpoint); - } else if (CLOUD_S3.REGION_SETTING.exists(settings)) { - String region = CLOUD_S3.REGION_SETTING.get(settings); - endpoint = getEndpoint(region); - logger.debug("using s3 region [{}], with endpoint [{}]", region, endpoint); + public static AWSCredentialsProvider buildCredentials(ESLogger logger, String key, String secret) { + AWSCredentialsProvider credentials; + + if (key.isEmpty() && secret.isEmpty()) { + logger.debug("Using either environment variables, system properties or instance profile credentials"); + credentials = new AWSCredentialsProviderChain( + new EnvironmentVariableCredentialsProvider(), + new SystemPropertiesCredentialsProvider(), + new InstanceProfileCredentialsProvider() + ); + } else { + logger.debug("Using basic key/secret credentials"); + credentials = new AWSCredentialsProviderChain( + new 
StaticCredentialsProvider(new BasicAWSCredentials(key, secret)) + ); } + + return credentials; + } + + protected static String findEndpoint(ESLogger logger, Settings settings, String endpoint, String region) { + if (Strings.isNullOrEmpty(endpoint)) { + logger.debug("no repository level endpoint has been defined. Trying to guess from repository region [{}]", region); + if (!region.isEmpty()) { + endpoint = getEndpoint(region); + logger.debug("using s3 region [{}], with endpoint [{}]", region, endpoint); + } else { + // No region has been set so we will use the default endpoint + if (CLOUD_S3.ENDPOINT_SETTING.exists(settings)) { + endpoint = CLOUD_S3.ENDPOINT_SETTING.get(settings); + logger.debug("using explicit s3 endpoint [{}]", endpoint); + } else if (REGION_SETTING.exists(settings) || CLOUD_S3.REGION_SETTING.exists(settings)) { + region = CLOUD_S3.REGION_SETTING.get(settings); + endpoint = getEndpoint(region); + logger.debug("using s3 region [{}], with endpoint [{}]", region, endpoint); + } + } + } else { + logger.debug("using repository level endpoint [{}]", endpoint); + } + return endpoint; } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index cc91173d954..56d05b65711 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -270,11 +270,6 @@ public class S3Repository extends BlobStoreRepository { String endpoint = getValue(metadata.settings(), settings, Repository.ENDPOINT_SETTING, Repositories.ENDPOINT_SETTING); Protocol protocol = getValue(metadata.settings(), settings, Repository.PROTOCOL_SETTING, Repositories.PROTOCOL_SETTING); String region = getValue(metadata.settings(), settings, Repository.REGION_SETTING, Repositories.REGION_SETTING); - // If no region is defined either in 
region, repositories.s3.region, cloud.aws.s3.region or cloud.aws.region - // we fallback to Default bucket - null - if (Strings.isEmpty(region)) { - region = null; - } boolean serverSideEncryption = getValue(metadata.settings(), settings, Repository.SERVER_SIDE_ENCRYPTION_SETTING, Repositories.SERVER_SIDE_ENCRYPTION_SETTING); ByteSizeValue bufferSize = getValue(metadata.settings(), settings, Repository.BUFFER_SIZE_SETTING, Repositories.BUFFER_SIZE_SETTING); diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AwsS3ServiceImplTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AwsS3ServiceImplTests.java new file mode 100644 index 00000000000..788ea8b60ed --- /dev/null +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AwsS3ServiceImplTests.java @@ -0,0 +1,313 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.cloud.aws; + +import com.amazonaws.ClientConfiguration; +import com.amazonaws.Protocol; +import com.amazonaws.auth.AWSCredentials; +import com.amazonaws.auth.AWSCredentialsProvider; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.repositories.s3.S3Repository; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class AwsS3ServiceImplTests extends ESTestCase { + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/19556") + public void testAWSCredentialsWithSystemProviders() { + AWSCredentialsProvider credentialsProvider = InternalAwsS3Service.buildCredentials(logger, "", ""); + + AWSCredentials credentials = credentialsProvider.getCredentials(); + assertThat(credentials.getAWSAccessKeyId(), is("DUMMY_ACCESS_KEY")); + assertThat(credentials.getAWSSecretKey(), is("DUMMY_SECRET_KEY")); + } + + public void testAWSCredentialsWithElasticsearchAwsSettings() { + Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null); + Settings settings = Settings.builder() + .put(AwsS3Service.KEY_SETTING.getKey(), "aws_key") + .put(AwsS3Service.SECRET_SETTING.getKey(), "aws_secret") + .build(); + launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "aws_key", "aws_secret"); + } + + public void testAWSCredentialsWithElasticsearchS3Settings() { + Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null); + Settings settings = Settings.builder() + .put(AwsS3Service.CLOUD_S3.KEY_SETTING.getKey(), "s3_key") + .put(AwsS3Service.CLOUD_S3.SECRET_SETTING.getKey(), "s3_secret") + .build(); + launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "s3_key", "s3_secret"); + } + + public void testAWSCredentialsWithElasticsearchAwsAndS3Settings() { + Settings repositorySettings = 
generateRepositorySettings(null, null, "eu-central", null, null); + Settings settings = Settings.builder() + .put(AwsS3Service.KEY_SETTING.getKey(), "aws_key") + .put(AwsS3Service.SECRET_SETTING.getKey(), "aws_secret") + .put(AwsS3Service.CLOUD_S3.KEY_SETTING.getKey(), "s3_key") + .put(AwsS3Service.CLOUD_S3.SECRET_SETTING.getKey(), "s3_secret") + .build(); + launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "s3_key", "s3_secret"); + } + + public void testAWSCredentialsWithElasticsearchRepositoriesSettings() { + Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null); + Settings settings = Settings.builder() + .put(S3Repository.Repositories.KEY_SETTING.getKey(), "repositories_key") + .put(S3Repository.Repositories.SECRET_SETTING.getKey(), "repositories_secret") + .build(); + launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "repositories_key", "repositories_secret"); + } + + public void testAWSCredentialsWithElasticsearchAwsAndRepositoriesSettings() { + Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null); + Settings settings = Settings.builder() + .put(AwsS3Service.KEY_SETTING.getKey(), "aws_key") + .put(AwsS3Service.SECRET_SETTING.getKey(), "aws_secret") + .put(S3Repository.Repositories.KEY_SETTING.getKey(), "repositories_key") + .put(S3Repository.Repositories.SECRET_SETTING.getKey(), "repositories_secret") + .build(); + launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "repositories_key", "repositories_secret"); + } + + public void testAWSCredentialsWithElasticsearchAwsAndS3AndRepositoriesSettings() { + Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null); + Settings settings = Settings.builder() + .put(AwsS3Service.KEY_SETTING.getKey(), "aws_key") + .put(AwsS3Service.SECRET_SETTING.getKey(), "aws_secret") + 
.put(AwsS3Service.CLOUD_S3.KEY_SETTING.getKey(), "s3_key") + .put(AwsS3Service.CLOUD_S3.SECRET_SETTING.getKey(), "s3_secret") + .put(S3Repository.Repositories.KEY_SETTING.getKey(), "repositories_key") + .put(S3Repository.Repositories.SECRET_SETTING.getKey(), "repositories_secret") + .build(); + launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "repositories_key", "repositories_secret"); + } + + public void testAWSCredentialsWithElasticsearchRepositoriesSettingsAndRepositorySettings() { + Settings repositorySettings = generateRepositorySettings("repository_key", "repository_secret", "eu-central", null, null); + Settings settings = Settings.builder() + .put(S3Repository.Repositories.KEY_SETTING.getKey(), "repositories_key") + .put(S3Repository.Repositories.SECRET_SETTING.getKey(), "repositories_secret") + .build(); + launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "repository_key", "repository_secret"); + } + + public void testAWSCredentialsWithElasticsearchAwsAndRepositoriesSettingsAndRepositorySettings() { + Settings repositorySettings = generateRepositorySettings("repository_key", "repository_secret", "eu-central", null, null); + Settings settings = Settings.builder() + .put(AwsS3Service.KEY_SETTING.getKey(), "aws_key") + .put(AwsS3Service.SECRET_SETTING.getKey(), "aws_secret") + .put(S3Repository.Repositories.KEY_SETTING.getKey(), "repositories_key") + .put(S3Repository.Repositories.SECRET_SETTING.getKey(), "repositories_secret") + .build(); + launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "repository_key", "repository_secret"); + } + + public void testAWSCredentialsWithElasticsearchAwsAndS3AndRepositoriesSettingsAndRepositorySettings() { + Settings repositorySettings = generateRepositorySettings("repository_key", "repository_secret", "eu-central", null, null); + Settings settings = Settings.builder() + .put(AwsS3Service.KEY_SETTING.getKey(), "aws_key") + 
.put(AwsS3Service.SECRET_SETTING.getKey(), "aws_secret") + .put(AwsS3Service.CLOUD_S3.KEY_SETTING.getKey(), "s3_key") + .put(AwsS3Service.CLOUD_S3.SECRET_SETTING.getKey(), "s3_secret") + .put(S3Repository.Repositories.KEY_SETTING.getKey(), "repositories_key") + .put(S3Repository.Repositories.SECRET_SETTING.getKey(), "repositories_secret") + .build(); + launchAWSCredentialsWithElasticsearchSettingsTest(repositorySettings, settings, "repository_key", "repository_secret"); + } + + protected void launchAWSCredentialsWithElasticsearchSettingsTest(Settings singleRepositorySettings, Settings settings, + String expectedKey, String expectedSecret) { + String key = S3Repository.getValue(singleRepositorySettings, settings, + S3Repository.Repository.KEY_SETTING, S3Repository.Repositories.KEY_SETTING); + String secret = S3Repository.getValue(singleRepositorySettings, settings, + S3Repository.Repository.SECRET_SETTING, S3Repository.Repositories.SECRET_SETTING); + + AWSCredentials credentials = InternalAwsS3Service.buildCredentials(logger, key, secret).getCredentials(); + assertThat(credentials.getAWSAccessKeyId(), is(expectedKey)); + assertThat(credentials.getAWSSecretKey(), is(expectedSecret)); + } + + public void testAWSDefaultConfiguration() { + Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null); + launchAWSConfigurationTest(Settings.EMPTY, repositorySettings, Protocol.HTTPS, null, -1, null, null, null, 3, false); + } + + public void testAWSConfigurationWithAwsSettings() { + Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null); + Settings settings = Settings.builder() + .put(AwsS3Service.PROTOCOL_SETTING.getKey(), "http") + .put(AwsS3Service.PROXY_HOST_SETTING.getKey(), "aws_proxy_host") + .put(AwsS3Service.PROXY_PORT_SETTING.getKey(), 8080) + .put(AwsS3Service.PROXY_USERNAME_SETTING.getKey(), "aws_proxy_username") + .put(AwsS3Service.PROXY_PASSWORD_SETTING.getKey(), "aws_proxy_password") + 
.put(AwsS3Service.SIGNER_SETTING.getKey(), "AWS3SignerType") + .build(); + launchAWSConfigurationTest(settings, repositorySettings, Protocol.HTTP, "aws_proxy_host", 8080, "aws_proxy_username", + "aws_proxy_password", "AWS3SignerType", 3, false); + } + + public void testAWSConfigurationWithAwsAndS3Settings() { + Settings repositorySettings = generateRepositorySettings(null, null, "eu-central", null, null); + Settings settings = Settings.builder() + .put(AwsS3Service.PROTOCOL_SETTING.getKey(), "http") + .put(AwsS3Service.PROXY_HOST_SETTING.getKey(), "aws_proxy_host") + .put(AwsS3Service.PROXY_PORT_SETTING.getKey(), 8080) + .put(AwsS3Service.PROXY_USERNAME_SETTING.getKey(), "aws_proxy_username") + .put(AwsS3Service.PROXY_PASSWORD_SETTING.getKey(), "aws_proxy_password") + .put(AwsS3Service.SIGNER_SETTING.getKey(), "AWS3SignerType") + .put(AwsS3Service.CLOUD_S3.PROTOCOL_SETTING.getKey(), "https") + .put(AwsS3Service.CLOUD_S3.PROXY_HOST_SETTING.getKey(), "s3_proxy_host") + .put(AwsS3Service.CLOUD_S3.PROXY_PORT_SETTING.getKey(), 8081) + .put(AwsS3Service.CLOUD_S3.PROXY_USERNAME_SETTING.getKey(), "s3_proxy_username") + .put(AwsS3Service.CLOUD_S3.PROXY_PASSWORD_SETTING.getKey(), "s3_proxy_password") + .put(AwsS3Service.CLOUD_S3.SIGNER_SETTING.getKey(), "NoOpSignerType") + .build(); + launchAWSConfigurationTest(settings, repositorySettings, Protocol.HTTPS, "s3_proxy_host", 8081, "s3_proxy_username", + "s3_proxy_password", "NoOpSignerType", 3, false); + } + + protected void launchAWSConfigurationTest(Settings settings, + Settings singleRepositorySettings, + Protocol expectedProtocol, + String expectedProxyHost, + int expectedProxyPort, + String expectedProxyUsername, + String expectedProxyPassword, + String expectedSigner, + Integer expectedMaxRetries, + boolean expectedUseThrottleRetries) { + Protocol protocol = S3Repository.getValue(singleRepositorySettings, settings, + S3Repository.Repository.PROTOCOL_SETTING, S3Repository.Repositories.PROTOCOL_SETTING); + Integer 
maxRetries = S3Repository.getValue(singleRepositorySettings, settings, + S3Repository.Repository.MAX_RETRIES_SETTING, S3Repository.Repositories.MAX_RETRIES_SETTING); + Boolean useThrottleRetries = S3Repository.getValue(singleRepositorySettings, settings, + S3Repository.Repository.USE_THROTTLE_RETRIES_SETTING, S3Repository.Repositories.USE_THROTTLE_RETRIES_SETTING); + + ClientConfiguration configuration = InternalAwsS3Service.buildConfiguration(logger, settings, protocol, maxRetries, null, + useThrottleRetries); + + assertThat(configuration.getResponseMetadataCacheSize(), is(0)); + assertThat(configuration.getProtocol(), is(expectedProtocol)); + assertThat(configuration.getProxyHost(), is(expectedProxyHost)); + assertThat(configuration.getProxyPort(), is(expectedProxyPort)); + assertThat(configuration.getProxyUsername(), is(expectedProxyUsername)); + assertThat(configuration.getProxyPassword(), is(expectedProxyPassword)); + assertThat(configuration.getSignerOverride(), is(expectedSigner)); + assertThat(configuration.getMaxErrorRetry(), is(expectedMaxRetries)); + assertThat(configuration.useThrottledRetries(), is(expectedUseThrottleRetries)); + } + + private static Settings generateRepositorySettings(String key, String secret, String region, String endpoint, Integer maxRetries) { + Settings.Builder builder = Settings.builder(); + if (region != null) { + builder.put(S3Repository.Repository.REGION_SETTING.getKey(), region); + } + if (endpoint != null) { + builder.put(S3Repository.Repository.ENDPOINT_SETTING.getKey(), endpoint); + } + if (key != null) { + builder.put(S3Repository.Repository.KEY_SETTING.getKey(), key); + } + if (secret != null) { + builder.put(S3Repository.Repository.SECRET_SETTING.getKey(), secret); + } + if (maxRetries != null) { + builder.put(S3Repository.Repository.MAX_RETRIES_SETTING.getKey(), maxRetries); + } + return builder.build(); + } + + public void testDefaultEndpoint() { + launchAWSEndpointTest(generateRepositorySettings("repository_key", 
"repository_secret", null, null, null), Settings.EMPTY, ""); + launchAWSEndpointTest(generateRepositorySettings("repository_key", "repository_secret", "eu-central", null, null), Settings.EMPTY, + "s3.eu-central-1.amazonaws.com"); + launchAWSEndpointTest(generateRepositorySettings("repository_key", "repository_secret", null, "repository.endpoint", null), + Settings.EMPTY, "repository.endpoint"); + } + + public void testSpecificEndpoint() { + Settings settings = Settings.builder() + .put(InternalAwsS3Service.CLOUD_S3.ENDPOINT_SETTING.getKey(), "ec2.endpoint") + .build(); + launchAWSEndpointTest(generateRepositorySettings("repository_key", "repository_secret", null, null, null), settings, + "ec2.endpoint"); + // Endpoint has precedence on region. Whatever region we set, we won't use it + launchAWSEndpointTest(generateRepositorySettings("repository_key", "repository_secret", "eu-central", null, null), settings, + "ec2.endpoint"); + launchAWSEndpointTest(generateRepositorySettings("repository_key", "repository_secret", null, "repository.endpoint", null), + settings, "repository.endpoint"); + } + + public void testRegionWithAwsSettings() { + Settings settings = Settings.builder() + .put(InternalAwsS3Service.REGION_SETTING.getKey(), randomFrom("eu-west", "eu-west-1")) + .build(); + launchAWSEndpointTest(generateRepositorySettings("repository_key", "repository_secret", null, null, null), settings, + "s3-eu-west-1.amazonaws.com"); + launchAWSEndpointTest(generateRepositorySettings("repository_key", "repository_secret", "eu-central", null, null), settings, + "s3.eu-central-1.amazonaws.com"); + launchAWSEndpointTest(generateRepositorySettings("repository_key", "repository_secret", null, "repository.endpoint", null), + settings, "repository.endpoint"); + } + + public void testRegionWithAwsAndS3Settings() { + Settings settings = Settings.builder() + .put(InternalAwsS3Service.REGION_SETTING.getKey(), randomFrom("eu-west", "eu-west-1")) + 
.put(InternalAwsS3Service.CLOUD_S3.REGION_SETTING.getKey(), randomFrom("us-west", "us-west-1")) + .build(); + launchAWSEndpointTest(generateRepositorySettings("repository_key", "repository_secret", null, null, null), settings, + "s3-us-west-1.amazonaws.com"); + launchAWSEndpointTest(generateRepositorySettings("repository_key", "repository_secret", "eu-central", null, null), settings, + "s3.eu-central-1.amazonaws.com"); + launchAWSEndpointTest(generateRepositorySettings("repository_key", "repository_secret", null, "repository.endpoint", null), + settings, "repository.endpoint"); + } + + public void testInvalidRegion() { + Settings settings = Settings.builder() + .put(InternalAwsS3Service.REGION_SETTING.getKey(), "does-not-exist") + .build(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { + launchAWSEndpointTest(generateRepositorySettings("repository_key", "repository_secret", null, null, null), settings, null); + }); + assertThat(e.getMessage(), containsString("No automatic endpoint could be derived from region")); + + launchAWSEndpointTest(generateRepositorySettings("repository_key", "repository_secret", "eu-central", null, null), settings, + "s3.eu-central-1.amazonaws.com"); + launchAWSEndpointTest(generateRepositorySettings("repository_key", "repository_secret", null, "repository.endpoint", null), + settings, "repository.endpoint"); + } + + protected void launchAWSEndpointTest(Settings singleRepositorySettings, Settings settings, + String expectedEndpoint) { + String region = S3Repository.getValue(singleRepositorySettings, settings, + S3Repository.Repository.REGION_SETTING, S3Repository.Repositories.REGION_SETTING); + String endpoint = S3Repository.getValue(singleRepositorySettings, settings, + S3Repository.Repository.ENDPOINT_SETTING, S3Repository.Repositories.ENDPOINT_SETTING); + + String foundEndpoint = InternalAwsS3Service.findEndpoint(logger, settings, endpoint, region); + assertThat(foundEndpoint, is(expectedEndpoint)); 
+ } + +} From 2d1b0587dd57a74c48022f5456467ab8873a2d0c Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 22 Jul 2016 22:26:35 -0400 Subject: [PATCH 47/93] Introduce Netty 4 This commit adds transport-netty4, a transport and HTTP implementation based on Netty 4. Relates #19526 --- build.gradle | 1 + .../elasticsearch/gradle/BuildPlugin.groovy | 4 + client/transport/build.gradle | 1 + .../client/PreBuiltTransportClient.java | 52 +- .../client/PreBuiltTransportClientTests.java | 2 + .../common/bytes/PagedBytesReference.java | 21 +- .../bytes/ReleasablePagedBytesReference.java | 7 +- .../common/io/ReleasableBytesStream.java | 1 + .../common/io/stream/BytesStreamOutput.java | 14 +- .../stream/ReleasableBytesStreamOutput.java | 7 +- .../common/lease/Releasable.java | 1 + .../elasticsearch/http/HttpServerAdapter.java | 1 + .../http/HttpServerTransport.java | 4 +- .../rest/AbstractRestChannel.java | 1 + .../org/elasticsearch/rest/RestChannel.java | 2 + .../elasticsearch/rest/RestController.java | 2 +- .../org/elasticsearch/rest/RestRequest.java | 50 +- .../elasticsearch/transport/TcpTransport.java | 27 +- .../transport/TcpTransportChannel.java | 3 - .../transport/TransportService.java | 4 +- .../transport/TransportServiceAdapter.java | 8 +- .../transport/local/LocalTransport.java | 4 +- .../elasticsearch/http/HttpServerTests.java | 28 +- .../rest/BytesRestResponseTests.java | 38 +- distribution/build.gradle | 3 +- distribution/integ-test-zip/build.gradle | 1 - modules/reindex/build.gradle | 2 +- .../index/reindex/RetryTests.java | 61 +- .../http/netty3/Netty3HttpRequest.java | 49 +- .../http/netty3/Netty3HttpRequestHandler.java | 1 - .../netty3/Netty3HttpServerTransport.java | 18 +- .../netty3/Netty3MessageChannelHandler.java | 2 +- .../transport/netty3/Netty3Transport.java | 40 +- .../elasticsearch/ESNetty3IntegTestCase.java | 6 +- ...Netty3TransportMultiPortIntegrationIT.java | 2 +- modules/transport-netty4/build.gradle | 170 ++++++ 
...ffer-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 | 1 + .../licenses/netty-buffer-LICENSE.txt | 202 +++++++ .../licenses/netty-buffer-NOTICE.txt | 116 ++++ ...odec-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 | 1 + .../licenses/netty-codec-LICENSE.txt | 202 +++++++ .../licenses/netty-codec-NOTICE.txt | 116 ++++ ...http-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 | 1 + .../licenses/netty-codec-http-LICENSE.txt | 202 +++++++ .../licenses/netty-codec-http-NOTICE.txt | 116 ++++ ...mmon-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 | 1 + .../licenses/netty-common-LICENSE.txt | 202 +++++++ .../licenses/netty-common-NOTICE.txt | 116 ++++ ...dler-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 | 1 + .../licenses/netty-handler-LICENSE.txt | 202 +++++++ .../licenses/netty-handler-NOTICE.txt | 116 ++++ ...lver-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 | 1 + .../licenses/netty-resolver-LICENSE.txt | 202 +++++++ .../licenses/netty-resolver-NOTICE.txt | 116 ++++ ...port-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 | 1 + .../licenses/netty-transport-LICENSE.txt | 202 +++++++ .../licenses/netty-transport-NOTICE.txt | 116 ++++ .../http/netty4/Netty4HttpChannel.java | 268 ++++++++ .../http/netty4/Netty4HttpRequest.java | 134 ++++ .../http/netty4/Netty4HttpRequestHandler.java | 78 +++ .../netty4/Netty4HttpServerTransport.java | 571 ++++++++++++++++++ .../http/netty4/cors/Netty4CorsConfig.java | 235 +++++++ .../netty4/cors/Netty4CorsConfigBuilder.java | 357 +++++++++++ .../http/netty4/cors/Netty4CorsHandler.java | 235 +++++++ .../pipelining/HttpPipelinedRequest.java | 91 +++ .../pipelining/HttpPipelinedResponse.java | 95 +++ .../pipelining/HttpPipeliningHandler.java | 108 ++++ .../elasticsearch/transport/Netty4Plugin.java | 94 +++ .../netty4/ByteBufBytesReference.java | 92 +++ .../transport/netty4/ByteBufStreamInput.java | 141 +++++ .../netty4/Netty4InternalESLogger.java | 187 ++++++ .../netty4/Netty4MessageChannelHandler.java | 86 +++ .../netty4/Netty4OpenChannelsHandler.java | 98 +++ .../netty4/Netty4SizeHeaderFrameDecoder.java 
| 50 ++ .../transport/netty4/Netty4Transport.java | 506 ++++++++++++++++ .../transport/netty4/Netty4Utils.java | 121 ++++ .../plugin-metadata/plugin-security.policy | 24 + .../elasticsearch/ESNetty4IntegTestCase.java | 72 +++ .../http/netty4/Netty4HttpChannelTests.java | 490 +++++++++++++++ .../http/netty4/Netty4HttpClient.java | 190 ++++++ .../netty4/Netty4HttpPublishPortTests.java | 92 +++ .../netty4/Netty4HttpRequestSizeLimitIT.java | 131 ++++ .../Netty4HttpServerPipeliningTests.java | 260 ++++++++ .../Netty4HttpServerTransportTests.java | 91 +++ .../netty4/Netty4PipeliningDisabledIT.java | 77 +++ .../netty4/Netty4PipeliningEnabledIT.java | 76 +++ .../http/netty4/Netty4RestIT.java | 41 ++ .../Netty4HttpPipeliningHandlerTests.java | 262 ++++++++ .../netty4/ByteBufBytesReferenceTests.java | 79 +++ .../netty4/Netty4ScheduledPingTests.java | 148 +++++ .../Netty4SizeHeaderFrameDecoderTests.java | 107 ++++ .../transport/netty4/Netty4TransportIT.java | 124 ++++ ...Netty4TransportMultiPortIntegrationIT.java | 111 ++++ .../Netty4TransportPublishAddressIT.java | 91 +++ .../transport/netty4/Netty4UtilsTests.java | 98 +++ .../netty4/NettyTransportMultiPortTests.java | 146 +++++ .../netty4/SimpleNetty4TransportTests.java | 79 +++ .../rest-api-spec/test/10_basic.yaml | 13 + .../smoketest/ESSmokeClientTestCase.java | 24 +- qa/smoke-test-http/build.gradle | 1 + .../elasticsearch/http/HttpCompressionIT.java | 1 + .../elasticsearch/http/HttpSmokeTestCase.java | 58 +- settings.gradle | 1 + .../test/ESBackcompatTestCase.java | 2 +- .../org/elasticsearch/test/ExternalNode.java | 3 +- .../test/rest/ESRestTestCase.java | 1 + .../test/rest/FakeRestRequest.java | 41 +- .../transport/MockTcpTransport.java | 6 +- .../transport/MockTransportClient.java | 1 + 109 files changed, 9095 insertions(+), 264 deletions(-) create mode 100644 modules/transport-netty4/build.gradle create mode 100644 modules/transport-netty4/licenses/netty-buffer-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 create mode 100644 
modules/transport-netty4/licenses/netty-buffer-LICENSE.txt create mode 100644 modules/transport-netty4/licenses/netty-buffer-NOTICE.txt create mode 100644 modules/transport-netty4/licenses/netty-codec-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-codec-LICENSE.txt create mode 100644 modules/transport-netty4/licenses/netty-codec-NOTICE.txt create mode 100644 modules/transport-netty4/licenses/netty-codec-http-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-codec-http-LICENSE.txt create mode 100644 modules/transport-netty4/licenses/netty-codec-http-NOTICE.txt create mode 100644 modules/transport-netty4/licenses/netty-common-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-common-LICENSE.txt create mode 100644 modules/transport-netty4/licenses/netty-common-NOTICE.txt create mode 100644 modules/transport-netty4/licenses/netty-handler-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-handler-LICENSE.txt create mode 100644 modules/transport-netty4/licenses/netty-handler-NOTICE.txt create mode 100644 modules/transport-netty4/licenses/netty-resolver-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-resolver-LICENSE.txt create mode 100644 modules/transport-netty4/licenses/netty-resolver-NOTICE.txt create mode 100644 modules/transport-netty4/licenses/netty-transport-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-transport-LICENSE.txt create mode 100644 modules/transport-netty4/licenses/netty-transport-NOTICE.txt create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java create mode 100644 
modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsConfig.java create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsConfigBuilder.java create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsHandler.java create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipelinedRequest.java create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipelinedResponse.java create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/transport/Netty4Plugin.java create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/ByteBufBytesReference.java create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/ByteBufStreamInput.java create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageChannelHandler.java create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4OpenChannelsHandler.java create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoder.java create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java create mode 100644 
modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java create mode 100644 modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/ESNetty4IntegTestCase.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPublishPortTests.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpRequestSizeLimitIT.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningDisabledIT.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningEnabledIT.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4RestIT.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/ByteBufBytesReferenceTests.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4TransportIT.java create mode 100644 
modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4TransportMultiPortIntegrationIT.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4TransportPublishAddressIT.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4UtilsTests.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyTransportMultiPortTests.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java create mode 100644 modules/transport-netty4/src/test/resources/rest-api-spec/test/10_basic.yaml diff --git a/build.gradle b/build.gradle index 807e3cfd222..c2bf6c9309f 100644 --- a/build.gradle +++ b/build.gradle @@ -175,6 +175,7 @@ subprojects { "org.elasticsearch.test:logger-usage:${version}": ':test:logger-usage', // for transport client "org.elasticsearch.plugin:transport-netty3-client:${version}": ':modules:transport-netty3', + "org.elasticsearch.plugin:transport-netty4-client:${version}": ':modules:transport-netty4', "org.elasticsearch.plugin:reindex-client:${version}": ':modules:reindex', "org.elasticsearch.plugin:lang-mustache-client:${version}": ':modules:lang-mustache', "org.elasticsearch.plugin:percolator-client:${version}": ':modules:percolator', diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index f825819cb3e..16f3535d9f3 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -297,6 +297,10 @@ class BuildPlugin implements Plugin { url "http://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/${revision}" } } + repos.maven { + name 'netty-snapshots' + url "http://s3.amazonaws.com/download.elasticsearch.org/nettysnapshots/20160722" + } 
} /** Returns a closure which can be used with a MavenPom for removing transitive dependencies. */ diff --git a/client/transport/build.gradle b/client/transport/build.gradle index 3c4f967f766..3573467b0ef 100644 --- a/client/transport/build.gradle +++ b/client/transport/build.gradle @@ -26,6 +26,7 @@ group = 'org.elasticsearch.client' dependencies { compile "org.elasticsearch:elasticsearch:${version}" compile project(path: ':modules:transport-netty3', configuration: 'runtime') + compile project(path: ':modules:transport-netty4', configuration: 'runtime') compile project(path: ':modules:reindex', configuration: 'runtime') compile project(path: ':modules:lang-mustache', configuration: 'runtime') compile project(path: ':modules:percolator', configuration: 'runtime') diff --git a/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java b/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java index 30d4beddae2..aebcf5fa3a3 100644 --- a/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java +++ b/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java @@ -16,30 +16,47 @@ * specific language governing permissions and limitations * under the License. 
*/ + package org.elasticsearch.transport.client; import org.elasticsearch.client.transport.TransportClient; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.reindex.ReindexPlugin; import org.elasticsearch.percolator.PercolatorPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.mustache.MustachePlugin; import org.elasticsearch.transport.Netty3Plugin; +import org.elasticsearch.transport.Netty4Plugin; import java.util.Arrays; import java.util.Collection; import java.util.Collections; - +import java.util.List; /** * A builder to create an instance of {@link TransportClient} - * This class pre-installs the {@link Netty3Plugin}, {@link ReindexPlugin}, {@link PercolatorPlugin}, and {@link MustachePlugin} + * This class pre-installs the + * {@link Netty3Plugin}, + * {@link Netty4Plugin}, + * {@link ReindexPlugin}, + * {@link PercolatorPlugin}, + * and {@link MustachePlugin} * for the client. These plugins are all elasticsearch core modules required. */ @SuppressWarnings({"unchecked","varargs"}) public class PreBuiltTransportClient extends TransportClient { - private static final Collection> PRE_INSTALLED_PLUGINS = Collections.unmodifiableList(Arrays.asList( - TransportPlugin.class, ReindexPlugin.class, PercolatorPlugin.class, MustachePlugin.class)); + + private static final Collection> PRE_INSTALLED_PLUGINS = + Collections.unmodifiableList( + Arrays.asList( + Netty3Plugin.class, + Netty4Plugin.class, + TransportPlugin.class, + ReindexPlugin.class, + PercolatorPlugin.class, + MustachePlugin.class)); @SafeVarargs public PreBuiltTransportClient(Settings settings, Class... 
plugins) { @@ -50,14 +67,25 @@ public class PreBuiltTransportClient extends TransportClient { super(settings, Settings.EMPTY, addPlugins(plugins, PRE_INSTALLED_PLUGINS)); } - /** - * The default transport implementation for the transport client. - */ - public static final class TransportPlugin extends Netty3Plugin { - // disable assertions for permissions since we might not have the permissions here - // compared to if we are loaded as a real module to the es server - public TransportPlugin(Settings settings) { - super(Settings.builder().put("netty.assert.buglevel", false).put(settings).build()); + public static final class TransportPlugin extends Plugin { + + private static final Setting ASSERT_NETTY_BUGLEVEL = + Setting.boolSetting("netty.assert.buglevel", true, Setting.Property.NodeScope); + + @Override + public List> getSettings() { + return Collections.singletonList(ASSERT_NETTY_BUGLEVEL); } + + @Override + public Settings additionalSettings() { + return Settings.builder() + .put(NetworkModule.TRANSPORT_TYPE_KEY, Netty3Plugin.NETTY_TRANSPORT_NAME) + .put(NetworkModule.HTTP_TYPE_KEY, Netty3Plugin.NETTY_HTTP_TRANSPORT_NAME) + .put("netty.assert.buglevel", true) + .build(); + } + } + } diff --git a/client/transport/src/test/java/org/elasticsearch/transport/client/PreBuiltTransportClientTests.java b/client/transport/src/test/java/org/elasticsearch/transport/client/PreBuiltTransportClientTests.java index ed300b70021..5b72006f5f0 100644 --- a/client/transport/src/test/java/org/elasticsearch/transport/client/PreBuiltTransportClientTests.java +++ b/client/transport/src/test/java/org/elasticsearch/transport/client/PreBuiltTransportClientTests.java @@ -16,6 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ + package org.elasticsearch.transport.client; import com.carrotsearch.randomizedtesting.RandomizedTest; @@ -57,4 +58,5 @@ public class PreBuiltTransportClientTests extends RandomizedTest { } } } + } diff --git a/core/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java b/core/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java index ef1102326d6..b336acfba20 100644 --- a/core/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java +++ b/core/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java @@ -21,7 +21,6 @@ package org.elasticsearch.common.bytes; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ByteArray; @@ -36,24 +35,24 @@ public class PagedBytesReference extends BytesReference { private static final int PAGE_SIZE = BigArrays.BYTE_PAGE_SIZE; private final BigArrays bigarrays; - protected final ByteArray bytearray; + protected final ByteArray byteArray; private final int offset; private final int length; - public PagedBytesReference(BigArrays bigarrays, ByteArray bytearray, int length) { - this(bigarrays, bytearray, 0, length); + public PagedBytesReference(BigArrays bigarrays, ByteArray byteArray, int length) { + this(bigarrays, byteArray, 0, length); } - public PagedBytesReference(BigArrays bigarrays, ByteArray bytearray, int from, int length) { + public PagedBytesReference(BigArrays bigarrays, ByteArray byteArray, int from, int length) { this.bigarrays = bigarrays; - this.bytearray = bytearray; + this.byteArray = byteArray; this.offset = from; this.length = length; } @Override public byte get(int index) { - return bytearray.get(offset + index); + return byteArray.get(offset + index); } @Override @@ -66,14 +65,14 @@ public class PagedBytesReference extends BytesReference { if (from < 0 || (from + length) > length()) 
{ throw new IllegalArgumentException("can't slice a buffer with length [" + length() + "], with slice parameters from [" + from + "], length [" + length + "]"); } - return new PagedBytesReference(bigarrays, bytearray, offset + from, length); + return new PagedBytesReference(bigarrays, byteArray, offset + from, length); } @Override public BytesRef toBytesRef() { BytesRef bref = new BytesRef(); // if length <= pagesize this will dereference the page, or materialize the byte[] - bytearray.get(offset, length, bref); + byteArray.get(offset, length, bref); return bref; } @@ -95,7 +94,7 @@ public class PagedBytesReference extends BytesReference { @Override public BytesRef next() throws IOException { if (nextFragmentSize != 0) { - final boolean materialized = bytearray.get(offset + position, nextFragmentSize, slice); + final boolean materialized = byteArray.get(offset + position, nextFragmentSize, slice); assert materialized == false : "iteration should be page aligned but array got materialized"; position += nextFragmentSize; final int remaining = length - position; @@ -111,6 +110,6 @@ public class PagedBytesReference extends BytesReference { @Override public long ramBytesUsed() { - return bytearray.ramBytesUsed(); + return byteArray.ramBytesUsed(); } } diff --git a/core/src/main/java/org/elasticsearch/common/bytes/ReleasablePagedBytesReference.java b/core/src/main/java/org/elasticsearch/common/bytes/ReleasablePagedBytesReference.java index 2152aa226a8..2700ea4dc13 100644 --- a/core/src/main/java/org/elasticsearch/common/bytes/ReleasablePagedBytesReference.java +++ b/core/src/main/java/org/elasticsearch/common/bytes/ReleasablePagedBytesReference.java @@ -30,12 +30,13 @@ import org.elasticsearch.common.util.ByteArray; */ public final class ReleasablePagedBytesReference extends PagedBytesReference implements Releasable { - public ReleasablePagedBytesReference(BigArrays bigarrays, ByteArray bytearray, int length) { - super(bigarrays, bytearray, length); + public 
ReleasablePagedBytesReference(BigArrays bigarrays, ByteArray byteArray, int length) { + super(bigarrays, byteArray, length); } @Override public void close() { - Releasables.close(bytearray); + Releasables.close(byteArray); } + } diff --git a/core/src/main/java/org/elasticsearch/common/io/ReleasableBytesStream.java b/core/src/main/java/org/elasticsearch/common/io/ReleasableBytesStream.java index 15bc324a346..e31f206bcad 100644 --- a/core/src/main/java/org/elasticsearch/common/io/ReleasableBytesStream.java +++ b/core/src/main/java/org/elasticsearch/common/io/ReleasableBytesStream.java @@ -28,4 +28,5 @@ public interface ReleasableBytesStream extends BytesStream { @Override ReleasablePagedBytesReference bytes(); + } diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/BytesStreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/BytesStreamOutput.java index 2d2719a113e..3de5c757ae1 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/BytesStreamOutput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/BytesStreamOutput.java @@ -33,7 +33,7 @@ import java.io.IOException; */ public class BytesStreamOutput extends StreamOutput implements BytesStream { - protected final BigArrays bigarrays; + protected final BigArrays bigArrays; protected ByteArray bytes; protected int count; @@ -57,9 +57,9 @@ public class BytesStreamOutput extends StreamOutput implements BytesStream { this(expectedSize, BigArrays.NON_RECYCLING_INSTANCE); } - protected BytesStreamOutput(int expectedSize, BigArrays bigarrays) { - this.bigarrays = bigarrays; - this.bytes = bigarrays.newByteArray(expectedSize); + protected BytesStreamOutput(int expectedSize, BigArrays bigArrays) { + this.bigArrays = bigArrays; + this.bytes = bigArrays.newByteArray(expectedSize); } @Override @@ -100,7 +100,7 @@ public class BytesStreamOutput extends StreamOutput implements BytesStream { public void reset() { // shrink list of pages if (bytes.size() > 
BigArrays.PAGE_SIZE_IN_BYTES) { - bytes = bigarrays.resize(bytes, BigArrays.PAGE_SIZE_IN_BYTES); + bytes = bigArrays.resize(bytes, BigArrays.PAGE_SIZE_IN_BYTES); } // go back to start @@ -145,7 +145,7 @@ public class BytesStreamOutput extends StreamOutput implements BytesStream { @Override public BytesReference bytes() { - return new PagedBytesReference(bigarrays, bytes, count); + return new PagedBytesReference(bigArrays, bytes, count); } /** @@ -157,7 +157,7 @@ public class BytesStreamOutput extends StreamOutput implements BytesStream { } private void ensureCapacity(int offset) { - bytes = bigarrays.grow(bytes, offset); + bytes = bigArrays.grow(bytes, offset); } } diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/ReleasableBytesStreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/ReleasableBytesStreamOutput.java index 4e5380e66d5..674ff18f0fc 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/ReleasableBytesStreamOutput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/ReleasableBytesStreamOutput.java @@ -36,12 +36,13 @@ public class ReleasableBytesStreamOutput extends BytesStreamOutput implements Re super(BigArrays.PAGE_SIZE_IN_BYTES, bigarrays); } - public ReleasableBytesStreamOutput(int expectedSize, BigArrays bigarrays) { - super(expectedSize, bigarrays); + public ReleasableBytesStreamOutput(int expectedSize, BigArrays bigArrays) { + super(expectedSize, bigArrays); } @Override public ReleasablePagedBytesReference bytes() { - return new ReleasablePagedBytesReference(bigarrays, bytes, count); + return new ReleasablePagedBytesReference(bigArrays, bytes, count); } + } diff --git a/core/src/main/java/org/elasticsearch/common/lease/Releasable.java b/core/src/main/java/org/elasticsearch/common/lease/Releasable.java index a86d812016e..61b1e85c749 100644 --- a/core/src/main/java/org/elasticsearch/common/lease/Releasable.java +++ b/core/src/main/java/org/elasticsearch/common/lease/Releasable.java @@ 
-30,4 +30,5 @@ public interface Releasable extends Closeable { @Override void close(); + } diff --git a/core/src/main/java/org/elasticsearch/http/HttpServerAdapter.java b/core/src/main/java/org/elasticsearch/http/HttpServerAdapter.java index 7fe5562f17a..16b0bd00443 100644 --- a/core/src/main/java/org/elasticsearch/http/HttpServerAdapter.java +++ b/core/src/main/java/org/elasticsearch/http/HttpServerAdapter.java @@ -29,4 +29,5 @@ import org.elasticsearch.rest.RestRequest; public interface HttpServerAdapter { void dispatchRequest(RestRequest request, RestChannel channel, ThreadContext context); + } diff --git a/core/src/main/java/org/elasticsearch/http/HttpServerTransport.java b/core/src/main/java/org/elasticsearch/http/HttpServerTransport.java index ab8b2bd1797..0ec57e2bfc2 100644 --- a/core/src/main/java/org/elasticsearch/http/HttpServerTransport.java +++ b/core/src/main/java/org/elasticsearch/http/HttpServerTransport.java @@ -22,9 +22,6 @@ package org.elasticsearch.http; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.transport.BoundTransportAddress; -/** - * - */ public interface HttpServerTransport extends LifecycleComponent { BoundTransportAddress boundAddress(); @@ -34,4 +31,5 @@ public interface HttpServerTransport extends LifecycleComponent { HttpStats stats(); void httpServerAdapter(HttpServerAdapter httpServerAdapter); + } diff --git a/core/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java b/core/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java index aa00943ed08..f5d4f4eb695 100644 --- a/core/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java +++ b/core/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java @@ -101,4 +101,5 @@ public abstract class AbstractRestChannel implements RestChannel { public boolean detailedErrorsEnabled() { return detailedErrorsEnabled; } + } diff --git a/core/src/main/java/org/elasticsearch/rest/RestChannel.java 
b/core/src/main/java/org/elasticsearch/rest/RestChannel.java index d895285ca8c..2a56313fd8a 100644 --- a/core/src/main/java/org/elasticsearch/rest/RestChannel.java +++ b/core/src/main/java/org/elasticsearch/rest/RestChannel.java @@ -30,6 +30,7 @@ import java.io.IOException; * A channel used to construct bytes / builder based outputs, and send responses. */ public interface RestChannel { + XContentBuilder newBuilder() throws IOException; XContentBuilder newErrorBuilder() throws IOException; @@ -46,4 +47,5 @@ public interface RestChannel { boolean detailedErrorsEnabled(); void sendResponse(RestResponse response); + } diff --git a/core/src/main/java/org/elasticsearch/rest/RestController.java b/core/src/main/java/org/elasticsearch/rest/RestController.java index 7072c4d59de..cf7c811a84c 100644 --- a/core/src/main/java/org/elasticsearch/rest/RestController.java +++ b/core/src/main/java/org/elasticsearch/rest/RestController.java @@ -193,7 +193,7 @@ public class RestController extends AbstractLifecycleComponent { if (!checkRequestParameters(request, channel)) { return; } - try (ThreadContext.StoredContext t = threadContext.stashContext()) { + try (ThreadContext.StoredContext ignored = threadContext.stashContext()) { for (String key : headersToCopy) { String httpHeader = request.header(key); if (httpHeader != null) { diff --git a/core/src/main/java/org/elasticsearch/rest/RestRequest.java b/core/src/main/java/org/elasticsearch/rest/RestRequest.java index 8872484d589..897ba294039 100644 --- a/core/src/main/java/org/elasticsearch/rest/RestRequest.java +++ b/core/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -29,16 +29,35 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.rest.support.RestUtils; import java.net.SocketAddress; +import java.util.Collections; +import java.util.HashMap; import java.util.Map; import static org.elasticsearch.common.unit.ByteSizeValue.parseBytesSizeValue; import static 
org.elasticsearch.common.unit.TimeValue.parseTimeValue; -/** - * - */ public abstract class RestRequest implements ToXContent.Params { + private final Map params; + private final String rawPath; + + public RestRequest(String uri) { + final Map params = new HashMap<>(); + int pathEndPos = uri.indexOf('?'); + if (pathEndPos < 0) { + this.rawPath = uri; + } else { + this.rawPath = uri.substring(0, pathEndPos); + RestUtils.decodeQueryString(uri, pathEndPos + 1, params); + } + this.params = params; + } + + public RestRequest(Map params, String path) { + this.params = params; + this.rawPath = path; + } + public enum Method { GET, POST, PUT, DELETE, OPTIONS, HEAD } @@ -53,7 +72,9 @@ public abstract class RestRequest implements ToXContent.Params { /** * The non decoded, raw path provided. */ - public abstract String rawPath(); + public String rawPath() { + return rawPath; + } /** * The path part of the URI (without the query string), decoded. @@ -80,12 +101,27 @@ public abstract class RestRequest implements ToXContent.Params { return null; } - public abstract boolean hasParam(String key); + public final boolean hasParam(String key) { + return params.containsKey(key); + } @Override - public abstract String param(String key); + public final String param(String key) { + return params.get(key); + } - public abstract Map params(); + @Override + public final String param(String key, String defaultValue) { + String value = params.get(key); + if (value == null) { + return defaultValue; + } + return value; + } + + public Map params() { + return params; + } public float paramAsFloat(String key, float defaultValue) { String sValue = param(key); diff --git a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java index 0e601ecb5b5..7145777aad7 100644 --- a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -93,6 +93,7 @@ 
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.function.Supplier; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -1076,23 +1077,23 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i if (buffer.get(offset) != 'E' || buffer.get(offset + 1) != 'S') { // special handling for what is probably HTTP if (bufferStartsWith(buffer, offset, "GET ") || - bufferStartsWith(buffer, offset, "POST ") || - bufferStartsWith(buffer, offset, "PUT ") || - bufferStartsWith(buffer, offset, "HEAD ") || - bufferStartsWith(buffer, offset, "DELETE ") || - bufferStartsWith(buffer, offset, "OPTIONS ") || - bufferStartsWith(buffer, offset, "PATCH ") || - bufferStartsWith(buffer, offset, "TRACE ")) { + bufferStartsWith(buffer, offset, "POST ") || + bufferStartsWith(buffer, offset, "PUT ") || + bufferStartsWith(buffer, offset, "HEAD ") || + bufferStartsWith(buffer, offset, "DELETE ") || + bufferStartsWith(buffer, offset, "OPTIONS ") || + bufferStartsWith(buffer, offset, "PATCH ") || + bufferStartsWith(buffer, offset, "TRACE ")) { throw new HttpOnTransportException("This is not a HTTP port"); } // we have 6 readable bytes, show 4 (should be enough) throw new StreamCorruptedException("invalid internal transport message format, got (" - + Integer.toHexString(buffer.get(offset) & 0xFF) + "," - + Integer.toHexString(buffer.get(offset + 1) & 0xFF) + "," - + Integer.toHexString(buffer.get(offset + 2) & 0xFF) + "," - + Integer.toHexString(buffer.get(offset + 3) & 0xFF) + ")"); + + Integer.toHexString(buffer.get(offset) & 0xFF) + "," + + Integer.toHexString(buffer.get(offset + 1) & 0xFF) + "," + + Integer.toHexString(buffer.get(offset + 2) & 0xFF) + "," + + Integer.toHexString(buffer.get(offset + 3) & 0xFF) + ")"); } final int dataLen; @@ -1111,7 +1112,7 @@ public abstract class 
TcpTransport extends AbstractLifecycleComponent i // safety against too large frames being sent if (dataLen > NINETY_PER_HEAP_SIZE) { throw new IllegalArgumentException("transport content length received [" + new ByteSizeValue(dataLen) + "] exceeded [" - + new ByteSizeValue(NINETY_PER_HEAP_SIZE) + "]"); + + new ByteSizeValue(NINETY_PER_HEAP_SIZE) + "]"); } if (buffer.length() < dataLen + sizeHeaderLength) { @@ -1159,7 +1160,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i public final void messageReceived(BytesReference reference, Channel channel, String profileName, InetSocketAddress remoteAddress, int messageLengthBytes) throws IOException { final int totalMessageSize = messageLengthBytes + TcpHeader.MARKER_BYTES_SIZE + TcpHeader.MESSAGE_LENGTH_SIZE; - transportServiceAdapter.received(totalMessageSize); + transportServiceAdapter.addBytesReceived(totalMessageSize); // we have additional bytes to read, outside of the header boolean hasMessageBytesToRead = (totalMessageSize - TcpHeader.HEADER_SIZE) > 0; StreamInput streamIn = reference.streamInput(); diff --git a/core/src/main/java/org/elasticsearch/transport/TcpTransportChannel.java b/core/src/main/java/org/elasticsearch/transport/TcpTransportChannel.java index 16c9ede851b..1fceb5aa1a3 100644 --- a/core/src/main/java/org/elasticsearch/transport/TcpTransportChannel.java +++ b/core/src/main/java/org/elasticsearch/transport/TcpTransportChannel.java @@ -23,9 +23,6 @@ import org.elasticsearch.Version; import java.io.IOException; import java.util.concurrent.atomic.AtomicBoolean; -/** - * - */ public final class TcpTransportChannel implements TransportChannel { private final TcpTransport transport; protected final Version version; diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index 765bb988777..15164a5d201 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java 
+++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -625,12 +625,12 @@ public class TransportService extends AbstractLifecycleComponent { final MeanMetric txMetric = new MeanMetric(); @Override - public void received(long size) { + public void addBytesReceived(long size) { rxMetric.inc(size); } @Override - public void sent(long size) { + public void addBytesSent(long size) { txMetric.inc(size); } diff --git a/core/src/main/java/org/elasticsearch/transport/TransportServiceAdapter.java b/core/src/main/java/org/elasticsearch/transport/TransportServiceAdapter.java index 5a1b9e9b349..34910532a02 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportServiceAdapter.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportServiceAdapter.java @@ -21,14 +21,11 @@ package org.elasticsearch.transport; import org.elasticsearch.cluster.node.DiscoveryNode; -/** - * - */ public interface TransportServiceAdapter { - void received(long size); + void addBytesReceived(long size); - void sent(long size); + void addBytesSent(long size); /** called by the {@link Transport} implementation once a request has been sent */ void onRequestSent(DiscoveryNode node, long requestId, String action, TransportRequest request, TransportRequestOptions options); @@ -57,4 +54,5 @@ public interface TransportServiceAdapter { void raiseNodeConnected(DiscoveryNode node); void raiseNodeDisconnected(DiscoveryNode node); + } diff --git a/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java b/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java index c94e62ea422..61559442ff3 100644 --- a/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java @@ -229,7 +229,7 @@ public class LocalTransport extends AbstractLifecycleComponent implements Transp } final byte[] data = BytesReference.toBytes(stream.bytes()); - 
transportServiceAdapter.sent(data.length); + transportServiceAdapter.addBytesSent(data.length); transportServiceAdapter.onRequestSent(node, requestId, action, request, options); targetTransport.receiveMessage(version, data, action, requestId, this); } @@ -272,7 +272,7 @@ public class LocalTransport extends AbstractLifecycleComponent implements Transp @Nullable final Long sendRequestId) { Transports.assertTransportThread(); try { - transportServiceAdapter.received(data.length); + transportServiceAdapter.addBytesReceived(data.length); StreamInput stream = StreamInput.wrap(data); stream.setVersion(version); diff --git a/core/src/test/java/org/elasticsearch/http/HttpServerTests.java b/core/src/test/java/org/elasticsearch/http/HttpServerTests.java index f9de466e978..87167cdb733 100644 --- a/core/src/test/java/org/elasticsearch/http/HttpServerTests.java +++ b/core/src/test/java/org/elasticsearch/http/HttpServerTests.java @@ -189,11 +189,11 @@ public class HttpServerTests extends ESTestCase { } private static final class TestRestRequest extends RestRequest { - private final String path; + private final BytesReference content; private TestRestRequest(String path, String content) { - this.path = path; + super(Collections.emptyMap(), path); this.content = new BytesArray(content); } @@ -207,11 +207,6 @@ public class HttpServerTests extends ESTestCase { return null; } - @Override - public String rawPath() { - return path; - } - @Override public boolean hasContent() { return true; @@ -232,24 +227,5 @@ public class HttpServerTests extends ESTestCase { return null; } - @Override - public boolean hasParam(String key) { - return false; - } - - @Override - public String param(String key) { - return null; - } - - @Override - public String param(String key, String defaultValue) { - return null; - } - - @Override - public Map params() { - return null; - } } } diff --git a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java 
b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java index 051159b448b..642b3a53a34 100644 --- a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java +++ b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.index.Index; import org.elasticsearch.rest.support.RestUtils; import org.elasticsearch.search.SearchShardTarget; @@ -33,11 +34,15 @@ import org.elasticsearch.transport.RemoteTransportException; import java.io.FileNotFoundException; import java.io.IOException; +import java.util.Collections; +import java.util.Map; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -157,8 +162,37 @@ public class BytesRestResponseTests extends ESTestCase { public void testResponseWhenPathContainsEncodingError() throws IOException { final String path = "%a"; - final RestRequest request = mock(RestRequest.class); - when(request.rawPath()).thenReturn(path); + final RestRequest request = new RestRequest(Collections.emptyMap(), path) { + @Override + public Method method() { + return null; + } + + @Override + public String uri() { + return null; + } + + @Override + public boolean hasContent() { + return false; + } + + @Override + public BytesReference content() { + return null; + } + + @Override + public String header(String name) { + return null; + } + + @Override + public Iterable> headers() { + return null; + } + 
}; final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestUtils.decodeComponent(request.rawPath())); final RestChannel channel = new DetailedExceptionRestChannel(request); // if we try to decode the path, this will throw an IllegalArgumentException again diff --git a/distribution/build.gradle b/distribution/build.gradle index dcd5f170fc5..bfbf96b5d2a 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -72,7 +72,8 @@ project.rootProject.subprojects.findAll { it.path.startsWith(':modules:') }.each // See https://discuss.gradle.org/t/cross-project-task-dependencies-ordering-screws-up-finalizers/13190 project.configure(project.subprojects.findAll { it.name != 'integ-test-zip' }) { Project distribution -> distribution.afterEvaluate({ - distribution.integTest.mustRunAfter("${module.path}:integTest#stop") + // some integTest tasks will have multiple finalizers + distribution.integTest.mustRunAfter module.tasks.find { t -> t.name.matches(".*integTest\$") }.getFinalizedBy() }) } // also want to make sure the module's integration tests run after the integ-test-zip (ie rest tests) diff --git a/distribution/integ-test-zip/build.gradle b/distribution/integ-test-zip/build.gradle index 67f99aa884a..f1f3a2c26c5 100644 --- a/distribution/integ-test-zip/build.gradle +++ b/distribution/integ-test-zip/build.gradle @@ -36,4 +36,3 @@ publishing { } integTest.dependsOn buildZip - diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index 0d02d1d9474..e159b3af8c5 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -35,7 +35,6 @@ run { setting 'reindex.remote.whitelist', 'myself' } - dependencies { compile "org.elasticsearch.client:rest:${version}" // dependencies of the rest client @@ -45,6 +44,7 @@ dependencies { compile "commons-logging:commons-logging:${versions.commonslogging}" // for http - testing reindex from remote testCompile project(path: ':modules:transport-netty3', 
configuration: 'runtime') + testCompile project(path: ':modules:transport-netty4', configuration: 'runtime') } dependencyLicenses { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java index fd251d96a1f..2be27a1a1ad 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Netty3Plugin; +import org.elasticsearch.transport.Netty4Plugin; import org.junit.After; import org.junit.Before; @@ -56,17 +57,47 @@ import static org.hamcrest.Matchers.hasSize; * tests won't verify that. */ public class RetryTests extends ESSingleNodeTestCase { + private static final int DOC_COUNT = 20; private List blockedExecutors = new ArrayList<>(); + private boolean useNetty4; + + @Before + public void setUp() throws Exception { + super.setUp(); + useNetty4 = randomBoolean(); + createIndex("source"); + // Build the test data. Don't use indexRandom because that won't work consistently with such small thread pools. 
+ BulkRequestBuilder bulk = client().prepareBulk(); + for (int i = 0; i < DOC_COUNT; i++) { + bulk.add(client().prepareIndex("source", "test").setSource("foo", "bar " + i)); + } + Retry retry = Retry.on(EsRejectedExecutionException.class).policy(BackoffPolicy.exponentialBackoff()); + BulkResponse response = retry.withSyncBackoff(client(), bulk.request()); + assertFalse(response.buildFailureMessage(), response.hasFailures()); + client().admin().indices().prepareRefresh("source").get(); + } + + @After + public void forceUnblockAllExecutors() { + for (CyclicBarrier barrier: blockedExecutors) { + barrier.reset(); + } + } + @Override protected Collection> getPlugins() { - return pluginList(ReindexPlugin.class, Netty3Plugin.class, BogusPlugin.class); // we need netty here to http communication + return pluginList( + ReindexPlugin.class, + Netty3Plugin.class, + Netty4Plugin.class, + BogusPlugin.class); } public static final class BogusPlugin extends Plugin { - // se Netty3Plugin.... this runs without the permission from the netty3 module so it will fail since reindex can't set the property + // this runs without the permission from the netty module so it will fail since reindex can't set the property // to make it still work we disable that check but need to register the setting first private static final Setting ASSERT_NETTY_BUGLEVEL = Setting.boolSetting("netty.assert.buglevel", true, Setting.Property.NodeScope); @@ -94,30 +125,13 @@ public class RetryTests extends ESSingleNodeTestCase { settings.put(NetworkModule.HTTP_ENABLED.getKey(), true); // Whitelist reindexing from the http host we're going to use settings.put(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.getKey(), "myself"); + if (useNetty4) { + settings.put(NetworkModule.HTTP_TYPE_KEY, Netty4Plugin.NETTY_HTTP_TRANSPORT_NAME); + settings.put(NetworkModule.TRANSPORT_TYPE_KEY, Netty4Plugin.NETTY_TRANSPORT_NAME); + } return settings.build(); } - @Before - public void setupSourceIndex() throws Exception { - 
createIndex("source"); - // Build the test data. Don't use indexRandom because that won't work consistently with such small thread pools. - BulkRequestBuilder bulk = client().prepareBulk(); - for (int i = 0; i < DOC_COUNT; i++) { - bulk.add(client().prepareIndex("source", "test").setSource("foo", "bar " + i)); - } - Retry retry = Retry.on(EsRejectedExecutionException.class).policy(BackoffPolicy.exponentialBackoff()); - BulkResponse response = retry.withSyncBackoff(client(), bulk.request()); - assertFalse(response.buildFailureMessage(), response.hasFailures()); - client().admin().indices().prepareRefresh("source").get(); - } - - @After - public void forceUnblockAllExecutors() { - for (CyclicBarrier barrier: blockedExecutors) { - barrier.reset(); - } - } - public void testReindex() throws Exception { testCase(ReindexAction.NAME, ReindexAction.INSTANCE.newRequestBuilder(client()).source("source").destination("dest"), matcher().created(DOC_COUNT)); @@ -222,4 +236,5 @@ public class RetryTests extends ESSingleNodeTestCase { assertThat(response.getTasks(), hasSize(1)); return (BulkByScrollTask.Status) response.getTasks().get(0).getStatus(); } + } diff --git a/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpRequest.java b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpRequest.java index 39a5c94b239..abf23563b8a 100644 --- a/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpRequest.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpRequest.java @@ -26,40 +26,27 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.support.RestUtils; import org.jboss.netty.channel.Channel; import org.jboss.netty.handler.codec.http.HttpMethod; +import org.jboss.netty.handler.codec.http.HttpRequest; import java.net.SocketAddress; import java.util.HashMap; import java.util.Map; -/** - * - */ public class Netty3HttpRequest extends RestRequest { 
- private final org.jboss.netty.handler.codec.http.HttpRequest request; + private final HttpRequest request; private final Channel channel; - private final Map params; - private final String rawPath; private final BytesReference content; - public Netty3HttpRequest(org.jboss.netty.handler.codec.http.HttpRequest request, Channel channel) { + public Netty3HttpRequest(HttpRequest request, Channel channel) { + super(request.getUri()); this.request = request; this.channel = channel; - this.params = new HashMap<>(); if (request.getContent().readable()) { this.content = Netty3Utils.toBytesReference(request.getContent()); } else { this.content = BytesArray.EMPTY; } - - String uri = request.getUri(); - int pathEndPos = uri.indexOf('?'); - if (pathEndPos < 0) { - this.rawPath = uri; - } else { - this.rawPath = uri.substring(0, pathEndPos); - RestUtils.decodeQueryString(uri, pathEndPos + 1, params); - } } public org.jboss.netty.handler.codec.http.HttpRequest request() { @@ -97,16 +84,6 @@ public class Netty3HttpRequest extends RestRequest { return request.getUri(); } - @Override - public String rawPath() { - return rawPath; - } - - @Override - public Map params() { - return params; - } - @Override public boolean hasContent() { return content.length() > 0; @@ -153,22 +130,4 @@ public class Netty3HttpRequest extends RestRequest { return request.headers().entries(); } - @Override - public boolean hasParam(String key) { - return params.containsKey(key); - } - - @Override - public String param(String key) { - return params.get(key); - } - - @Override - public String param(String key, String defaultValue) { - String value = params.get(key); - if (value == null) { - return defaultValue; - } - return value; - } } diff --git a/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpRequestHandler.java b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpRequestHandler.java index 968eb6a24ab..829a2d9aa06 100644 --- 
a/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpRequestHandler.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpRequestHandler.java @@ -54,7 +54,6 @@ public class Netty3HttpRequestHandler extends SimpleChannelUpstreamHandler { request = (HttpRequest) e.getMessage(); } - threadContext.copyHeaders(request.headers()); // the netty HTTP handling always copy over the buffer to its own buffer, either in NioWorker internally // when reading, or using a cumalation buffer Netty3HttpRequest httpRequest = new Netty3HttpRequest(request, e.getChannel()); diff --git a/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpServerTransport.java b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpServerTransport.java index edbcc74e646..25884d14b70 100644 --- a/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpServerTransport.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpServerTransport.java @@ -120,29 +120,29 @@ public class Netty3HttpServerTransport extends AbstractLifecycleComponent implem public static Setting SETTING_HTTP_NETTY_MAX_CUMULATION_BUFFER_CAPACITY = Setting.byteSizeSetting("http.netty.max_cumulation_buffer_capacity", new ByteSizeValue(-1), - Property.NodeScope); + Property.NodeScope, Property.Shared); public static Setting SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = - Setting.intSetting("http.netty.max_composite_buffer_components", -1, Property.NodeScope); + Setting.intSetting("http.netty.max_composite_buffer_components", -1, Property.NodeScope, Property.Shared); public static final Setting SETTING_HTTP_WORKER_COUNT = new Setting<>("http.netty.worker_count", (s) -> Integer.toString(EsExecutors.boundedNumberOfProcessors(s) * 2), - (s) -> Setting.parseInt(s, 1, "http.netty.worker_count"), Property.NodeScope); + (s) -> Setting.parseInt(s, 1, 
"http.netty.worker_count"), Property.NodeScope, Property.Shared); public static final Setting SETTING_HTTP_TCP_NO_DELAY = - boolSetting("http.tcp_no_delay", NetworkService.TcpSettings.TCP_NO_DELAY, Property.NodeScope); + boolSetting("http.tcp_no_delay", NetworkService.TcpSettings.TCP_NO_DELAY, Property.NodeScope, Property.Shared); public static final Setting SETTING_HTTP_TCP_KEEP_ALIVE = - boolSetting("http.tcp.keep_alive", NetworkService.TcpSettings.TCP_KEEP_ALIVE, Property.NodeScope); + boolSetting("http.tcp.keep_alive", NetworkService.TcpSettings.TCP_KEEP_ALIVE, Property.NodeScope, Property.Shared); public static final Setting SETTING_HTTP_TCP_BLOCKING_SERVER = - boolSetting("http.tcp.blocking_server", NetworkService.TcpSettings.TCP_BLOCKING_SERVER, Property.NodeScope); + boolSetting("http.tcp.blocking_server", NetworkService.TcpSettings.TCP_BLOCKING_SERVER, Property.NodeScope, Property.Shared); public static final Setting SETTING_HTTP_TCP_REUSE_ADDRESS = - boolSetting("http.tcp.reuse_address", NetworkService.TcpSettings.TCP_REUSE_ADDRESS, Property.NodeScope); + boolSetting("http.tcp.reuse_address", NetworkService.TcpSettings.TCP_REUSE_ADDRESS, Property.NodeScope, Property.Shared); public static final Setting SETTING_HTTP_TCP_SEND_BUFFER_SIZE = Setting.byteSizeSetting("http.tcp.send_buffer_size", NetworkService.TcpSettings.TCP_SEND_BUFFER_SIZE, - Property.NodeScope); + Property.NodeScope, Property.Shared); public static final Setting SETTING_HTTP_TCP_RECEIVE_BUFFER_SIZE = Setting.byteSizeSetting("http.tcp.receive_buffer_size", NetworkService.TcpSettings.TCP_RECEIVE_BUFFER_SIZE, - Property.NodeScope); + Property.NodeScope, Property.Shared); public static final Setting SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE = Setting.byteSizeSetting("transport.netty.receive_predictor_size", settings -> { diff --git a/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3MessageChannelHandler.java 
b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3MessageChannelHandler.java index ff4dc27e2c8..e2ac10873cd 100644 --- a/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3MessageChannelHandler.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3MessageChannelHandler.java @@ -50,7 +50,7 @@ class Netty3MessageChannelHandler extends SimpleChannelUpstreamHandler { @Override public void writeComplete(ChannelHandlerContext ctx, WriteCompletionEvent e) throws Exception { - transportServiceAdapter.sent(e.getWrittenAmount()); + transportServiceAdapter.addBytesSent(e.getWrittenAmount()); super.writeComplete(ctx, e); } diff --git a/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3Transport.java b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3Transport.java index 5ca2252cf2c..f5f32044eb7 100644 --- a/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3Transport.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3Transport.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.Booleans; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.network.NetworkService.TcpSettings; @@ -45,6 +46,7 @@ import org.elasticsearch.transport.TransportServiceAdapter; import org.elasticsearch.transport.TransportSettings; import org.jboss.netty.bootstrap.ClientBootstrap; import org.jboss.netty.bootstrap.ServerBootstrap; +import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.channel.AdaptiveReceiveBufferSizePredictorFactory; import 
org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFuture; @@ -93,31 +95,36 @@ public class Netty3Transport extends TcpTransport { public static final Setting WORKER_COUNT = new Setting<>("transport.netty.worker_count", (s) -> Integer.toString(EsExecutors.boundedNumberOfProcessors(s) * 2), - (s) -> Setting.parseInt(s, 1, "transport.netty.worker_count"), Property.NodeScope); + (s) -> Setting.parseInt(s, 1, "transport.netty.worker_count"), Property.NodeScope, Property.Shared); public static final Setting NETTY_MAX_CUMULATION_BUFFER_CAPACITY = - Setting.byteSizeSetting("transport.netty.max_cumulation_buffer_capacity", new ByteSizeValue(-1), Property.NodeScope); + Setting.byteSizeSetting( + "transport.netty.max_cumulation_buffer_capacity", + new ByteSizeValue(-1), + Property.NodeScope, + Property.Shared); public static final Setting NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = - Setting.intSetting("transport.netty.max_composite_buffer_components", -1, -1, Property.NodeScope); + Setting.intSetting("transport.netty.max_composite_buffer_components", -1, -1, Property.NodeScope, Property.Shared); // See AdaptiveReceiveBufferSizePredictor#DEFAULT_XXX for default values in netty..., we can use higher ones for us, even fixed one public static final Setting NETTY_RECEIVE_PREDICTOR_SIZE = Setting.byteSizeSetting( - "transport.netty.receive_predictor_size", - settings -> { - long defaultReceiverPredictor = 512 * 1024; - if (JvmInfo.jvmInfo().getMem().getDirectMemoryMax().bytes() > 0) { - // we can guess a better default... 
- long l = (long) ((0.3 * JvmInfo.jvmInfo().getMem().getDirectMemoryMax().bytes()) / WORKER_COUNT.get(settings)); - defaultReceiverPredictor = Math.min(defaultReceiverPredictor, Math.max(l, 64 * 1024)); - } - return new ByteSizeValue(defaultReceiverPredictor).toString(); - }, Property.NodeScope); + "transport.netty.receive_predictor_size", + settings -> { + long defaultReceiverPredictor = 512 * 1024; + if (JvmInfo.jvmInfo().getMem().getDirectMemoryMax().bytes() > 0) { + // we can guess a better default... + long l = (long) ((0.3 * JvmInfo.jvmInfo().getMem().getDirectMemoryMax().bytes()) / WORKER_COUNT.get(settings)); + defaultReceiverPredictor = Math.min(defaultReceiverPredictor, Math.max(l, 64 * 1024)); + } + return new ByteSizeValue(defaultReceiverPredictor).toString(); + }, Property.NodeScope, + Property.Shared); public static final Setting NETTY_RECEIVE_PREDICTOR_MIN = - byteSizeSetting("transport.netty.receive_predictor_min", NETTY_RECEIVE_PREDICTOR_SIZE, Property.NodeScope); + byteSizeSetting("transport.netty.receive_predictor_min", NETTY_RECEIVE_PREDICTOR_SIZE, Property.NodeScope, Property.Shared); public static final Setting NETTY_RECEIVE_PREDICTOR_MAX = - byteSizeSetting("transport.netty.receive_predictor_max", NETTY_RECEIVE_PREDICTOR_SIZE, Property.NodeScope); + byteSizeSetting("transport.netty.receive_predictor_max", NETTY_RECEIVE_PREDICTOR_SIZE, Property.NodeScope, Property.Shared); public static final Setting NETTY_BOSS_COUNT = - intSetting("transport.netty.boss_count", 1, 1, Property.NodeScope); + intSetting("transport.netty.boss_count", 1, 1, Property.NodeScope, Property.Shared); protected final ByteSizeValue maxCumulationBufferCapacity; @@ -557,4 +564,5 @@ public class Netty3Transport extends TcpTransport { } }); } + } diff --git a/modules/transport-netty3/src/test/java/org/elasticsearch/ESNetty3IntegTestCase.java b/modules/transport-netty3/src/test/java/org/elasticsearch/ESNetty3IntegTestCase.java index a7f8f254fcc..3c90bc4ccea 100644 --- 
a/modules/transport-netty3/src/test/java/org/elasticsearch/ESNetty3IntegTestCase.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/ESNetty3IntegTestCase.java @@ -47,15 +47,15 @@ public abstract class ESNetty3IntegTestCase extends ESIntegTestCase { if (randomBoolean()) { builder.put(Netty3Transport.WORKER_COUNT.getKey(), random().nextInt(3) + 1); } - builder.put(NetworkModule.TRANSPORT_TYPE_KEY, "netty3"); - builder.put(NetworkModule.HTTP_TYPE_KEY, "netty3"); + builder.put(NetworkModule.TRANSPORT_TYPE_KEY, Netty3Plugin.NETTY_TRANSPORT_NAME); + builder.put(NetworkModule.HTTP_TYPE_KEY, Netty3Plugin.NETTY_HTTP_TRANSPORT_NAME); return builder.build(); } @Override protected Settings transportClientSettings() { Settings.Builder builder = Settings.builder().put(super.transportClientSettings()); - builder.put(NetworkModule.TRANSPORT_TYPE_KEY, "netty3"); + builder.put(NetworkModule.TRANSPORT_TYPE_KEY, Netty3Plugin.NETTY_TRANSPORT_NAME); return builder.build(); } diff --git a/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportMultiPortIntegrationIT.java b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportMultiPortIntegrationIT.java index 8b9b2d397db..d25951e254c 100644 --- a/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportMultiPortIntegrationIT.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportMultiPortIntegrationIT.java @@ -73,7 +73,7 @@ public class Netty3TransportMultiPortIntegrationIT extends ESNetty3IntegTestCase public void testThatTransportClientCanConnect() throws Exception { Settings settings = Settings.builder() .put("cluster.name", internalCluster().getClusterName()) - .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty3") + .put(NetworkModule.TRANSPORT_TYPE_KEY, Netty3Plugin.NETTY_TRANSPORT_NAME) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); try 
(TransportClient transportClient = new MockTransportClient(settings, Netty3Plugin.class)) { diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle new file mode 100644 index 00000000000..82d98a981ab --- /dev/null +++ b/modules/transport-netty4/build.gradle @@ -0,0 +1,170 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* + TODOs: + * fix permissions such that only netty4 can open sockets etc? + * fix the hack in the build framework that copies transport-netty4 into the integ test cluster + * maybe figure out a way to run all tests from core with netty4/network? 
+ */ +esplugin { + description 'Netty 4 based transport implementation' + classname 'org.elasticsearch.transport.Netty4Plugin' + hasClientJar = true +} + +compileTestJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked" + +dependencies { + // network stack + compile "io.netty:netty-buffer:4.1.4.Final-elastic-SNAPSHOT" + compile "io.netty:netty-codec:4.1.4.Final-elastic-SNAPSHOT" + compile "io.netty:netty-codec-http:4.1.4.Final-elastic-SNAPSHOT" + compile "io.netty:netty-common:4.1.4.Final-elastic-SNAPSHOT" + compile "io.netty:netty-handler:4.1.4.Final-elastic-SNAPSHOT" + compile "io.netty:netty-resolver:4.1.4.Final-elastic-SNAPSHOT" + compile "io.netty:netty-transport:4.1.4.Final-elastic-SNAPSHOT" +} + +integTest { + includePackaged = true + cluster { + setting 'http.type', 'netty4' + setting 'transport.type', 'netty4' + numNodes = 2 + } +} + +thirdPartyAudit.excludes = [ + // classes are missing + + // from io.netty.handler.codec.protobuf.ProtobufDecoder (netty) + 'com.google.protobuf.ExtensionRegistry', + 'com.google.protobuf.MessageLite$Builder', + 'com.google.protobuf.MessageLite', + 'com.google.protobuf.Parser', + + // from io.netty.logging.CommonsLoggerFactory (netty) + 'org.apache.commons.logging.Log', + 'org.apache.commons.logging.LogFactory', + + // from io.netty.handler.ssl.OpenSslEngine (netty) + 'org.apache.tomcat.jni.Buffer', + 'org.apache.tomcat.jni.Library', + 'org.apache.tomcat.jni.Pool', + 'org.apache.tomcat.jni.SSL', + 'org.apache.tomcat.jni.SSLContext', + + // from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty) + 'org.bouncycastle.asn1.x500.X500Name', + 'org.bouncycastle.cert.X509v3CertificateBuilder', + 'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter', + 'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder', + 'org.bouncycastle.jce.provider.BouncyCastleProvider', + 'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder', + + // from io.netty.handler.ssl.JettyNpnSslEngine 
(netty) + 'org.eclipse.jetty.npn.NextProtoNego$ClientProvider', + 'org.eclipse.jetty.npn.NextProtoNego$ServerProvider', + 'org.eclipse.jetty.npn.NextProtoNego', + + // from io.netty.handler.codec.marshalling.ChannelBufferByteInput (netty) + 'org.jboss.marshalling.ByteInput', + + // from io.netty.handler.codec.marshalling.ChannelBufferByteOutput (netty) + 'org.jboss.marshalling.ByteOutput', + + // from io.netty.handler.codec.marshalling.CompatibleMarshallingEncoder (netty) + 'org.jboss.marshalling.Marshaller', + + // from io.netty.handler.codec.marshalling.ContextBoundUnmarshallerProvider (netty) + 'org.jboss.marshalling.MarshallerFactory', + 'org.jboss.marshalling.MarshallingConfiguration', + 'org.jboss.marshalling.Unmarshaller', + + // from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional + 'org.slf4j.Logger', + 'org.slf4j.LoggerFactory', + + 'com.google.protobuf.ExtensionRegistryLite', + 'com.google.protobuf.MessageLiteOrBuilder', + 'com.google.protobuf.nano.CodedOutputByteBufferNano', + 'com.google.protobuf.nano.MessageNano', + 'com.jcraft.jzlib.Deflater', + 'com.jcraft.jzlib.Inflater', + 'com.jcraft.jzlib.JZlib$WrapperType', + 'com.jcraft.jzlib.JZlib', + 'com.ning.compress.BufferRecycler', + 'com.ning.compress.lzf.ChunkDecoder', + 'com.ning.compress.lzf.ChunkEncoder', + 'com.ning.compress.lzf.LZFEncoder', + 'com.ning.compress.lzf.util.ChunkDecoderFactory', + 'com.ning.compress.lzf.util.ChunkEncoderFactory', + 'javassist/ClassClassPath', + 'javassist/ClassPath', + 'javassist/ClassPool', + 'javassist.CtClass', + 'javassist.CtMethod', + 'lzma.sdk.lzma.Encoder', + 'net.jpountz.lz4.LZ4Compressor', + 'net.jpountz.lz4.LZ4Factory', + 'net.jpountz.lz4.LZ4FastDecompressor', + 'net.jpountz.xxhash.StreamingXXHash32', + 'net.jpountz.xxhash.XXHashFactory', + 'org.apache.logging.log4j.LogManager', + 'org.apache.logging.log4j.Logger', + 'org.apache.tomcat.jni.CertificateRequestedCallback', + 'org.apache.tomcat.jni.CertificateVerifier', + 
'org.apache.tomcat.jni.SessionTicketKey', + 'org.eclipse.jetty.alpn.ALPN$ClientProvider', + 'org.eclipse.jetty.alpn.ALPN$ServerProvider', + 'org.eclipse.jetty.alpn.ALPN', + + 'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', + 'io.netty.util.internal.PlatformDependent0', + 'io.netty.util.internal.UnsafeAtomicIntegerFieldUpdater', + 'io.netty.util.internal.UnsafeAtomicLongFieldUpdater', + 'io.netty.util.internal.UnsafeAtomicReferenceFieldUpdater', + 'io.netty.util.internal.chmv8.ConcurrentHashMapV8', + 'io.netty.util.internal.chmv8.ConcurrentHashMapV8$1', + 'io.netty.util.internal.chmv8.ConcurrentHashMapV8$TreeBin', + 'io.netty.util.internal.chmv8.CountedCompleter', + 'io.netty.util.internal.chmv8.CountedCompleter$1', + 'io.netty.util.internal.chmv8.ForkJoinPool', + 'io.netty.util.internal.chmv8.ForkJoinPool$2', + 'io.netty.util.internal.chmv8.ForkJoinPool$WorkQueue', + 'io.netty.util.internal.chmv8.ForkJoinTask', + 'io.netty.util.internal.chmv8.ForkJoinTask$1', + 'io.netty.util.internal.chmv8.Striped64', + 'io.netty.util.internal.chmv8.Striped64$1', + 'io.netty.util.internal.chmv8.Striped64$Cell', + 'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueConsumerNodeRef', + 'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueProducerNodeRef', + 'io.netty.util.internal.shaded.org.jctools.queues.ConcurrentSequencedCircularArrayQueue', + 'io.netty.util.internal.shaded.org.jctools.queues.LinkedQueueNode', + 'io.netty.util.internal.shaded.org.jctools.queues.MpmcArrayQueueConsumerField', + 'io.netty.util.internal.shaded.org.jctools.queues.MpmcArrayQueueProducerField', + 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueConsumerField', + 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueHeadLimitField', + 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueTailField', + 'io.netty.util.internal.shaded.org.jctools.queues.MpscChunkedArrayQueue', + 'io.netty.util.internal.shaded.org.jctools.util.JvmInfo', 
+ 'io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess', + 'io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess', +] diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 new file mode 100644 index 00000000000..e45d72cb77e --- /dev/null +++ b/modules/transport-netty4/licenses/netty-buffer-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 @@ -0,0 +1 @@ +bae159f72e78d211733db0f13b0d8436a65af5ac \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-buffer-LICENSE.txt b/modules/transport-netty4/licenses/netty-buffer-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-buffer-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/modules/transport-netty4/licenses/netty-buffer-NOTICE.txt b/modules/transport-netty4/licenses/netty-buffer-NOTICE.txt new file mode 100644 index 00000000000..5bbf91a14de --- /dev/null +++ b/modules/transport-netty4/licenses/netty-buffer-NOTICE.txt @@ -0,0 +1,116 @@ + + The Netty Project + ================= + +Please visit the Netty web site for more information: + + * http://netty.io/ + +Copyright 2011 The Netty Project + +The Netty Project licenses this file to you under the Apache License, +version 2.0 (the "License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at: + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations +under the License. + +Also, please refer to each LICENSE..txt file, which is located in +the 'license' directory of the distribution file, for the license terms of the +components that this product depends on. 
+ +------------------------------------------------------------------------------- +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified version of 'JZlib', a re-implementation of +zlib in pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD Style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product contains a modified version of 'Webbit', a Java event based +WebSocket and HTTP server: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * http://code.google.com/p/protobuf/ + +This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * http://www.bouncycastle.org/ + +This product optionally depends on 'SLF4J', a simple logging facade for Java, +which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * http://www.slf4j.org/ + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * http://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * http://logging.apache.org/log4j/ + +This product optionally depends on 'JBoss Logging', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-logging.txt (GNU LGPL 2.1) + * HOMEPAGE: + * http://anonsvn.jboss.org/repos/common/common-logging-spi/ + +This product optionally depends on 'Apache Felix', an open source OSGi +framework implementation, which can be obtained at: + + * LICENSE: + * license/LICENSE.felix.txt (Apache License 2.0) + * HOMEPAGE: + * http://felix.apache.org/ diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 new file mode 100644 index 00000000000..a1872082d0f --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 @@ -0,0 +1 @@ +a8fb268e5756768ed9ddc46806cf724481c14298 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-LICENSE.txt b/modules/transport-netty4/licenses/netty-codec-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache 
License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/modules/transport-netty4/licenses/netty-codec-NOTICE.txt b/modules/transport-netty4/licenses/netty-codec-NOTICE.txt new file mode 100644 index 00000000000..5bbf91a14de --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-NOTICE.txt @@ -0,0 +1,116 @@ + + The Netty Project + ================= + +Please visit the Netty web site for more information: + + * http://netty.io/ + +Copyright 2011 The Netty Project + +The Netty Project licenses this file to you under the Apache License, +version 2.0 (the "License"); you may not use this file except in compliance +with the License. 
You may obtain a copy of the License at: + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations +under the License. + +Also, please refer to each LICENSE..txt file, which is located in +the 'license' directory of the distribution file, for the license terms of the +components that this product depends on. + +------------------------------------------------------------------------------- +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified version of 'JZlib', a re-implementation of +zlib in pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD Style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product contains a modified version of 'Webbit', a Java event based +WebSocket and HTTP server: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * http://code.google.com/p/protobuf/ + 
+This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * http://www.bouncycastle.org/ + +This product optionally depends on 'SLF4J', a simple logging facade for Java, +which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * http://www.slf4j.org/ + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * http://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * http://logging.apache.org/log4j/ + +This product optionally depends on 'JBoss Logging', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-logging.txt (GNU LGPL 2.1) + * HOMEPAGE: + * http://anonsvn.jboss.org/repos/common/common-logging-spi/ + +This product optionally depends on 'Apache Felix', an open source OSGi +framework implementation, which can be obtained at: + + * LICENSE: + * license/LICENSE.felix.txt (Apache License 2.0) + * HOMEPAGE: + * http://felix.apache.org/ diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 new file mode 100644 index 00000000000..7c96920d161 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 @@ -0,0 +1 @@ +8219258943eaa26115a8f39b16163b4d0e53770e \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-LICENSE.txt 
b/modules/transport-netty4/licenses/netty-codec-http-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/modules/transport-netty4/licenses/netty-codec-http-NOTICE.txt b/modules/transport-netty4/licenses/netty-codec-http-NOTICE.txt new file mode 100644 index 00000000000..5bbf91a14de --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http-NOTICE.txt @@ -0,0 +1,116 @@ + + The Netty Project + ================= + +Please visit the Netty web site for more information: + + * http://netty.io/ + +Copyright 2011 The Netty Project + +The Netty Project licenses this file to you under the Apache License, +version 2.0 (the "License"); you may not use this file except in compliance +with the License. 
You may obtain a copy of the License at: + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations +under the License. + +Also, please refer to each LICENSE..txt file, which is located in +the 'license' directory of the distribution file, for the license terms of the +components that this product depends on. + +------------------------------------------------------------------------------- +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified version of 'JZlib', a re-implementation of +zlib in pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD Style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product contains a modified version of 'Webbit', a Java event based +WebSocket and HTTP server: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * http://code.google.com/p/protobuf/ + 
+This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * http://www.bouncycastle.org/ + +This product optionally depends on 'SLF4J', a simple logging facade for Java, +which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * http://www.slf4j.org/ + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * http://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * http://logging.apache.org/log4j/ + +This product optionally depends on 'JBoss Logging', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-logging.txt (GNU LGPL 2.1) + * HOMEPAGE: + * http://anonsvn.jboss.org/repos/common/common-logging-spi/ + +This product optionally depends on 'Apache Felix', an open source OSGi +framework implementation, which can be obtained at: + + * LICENSE: + * license/LICENSE.felix.txt (Apache License 2.0) + * HOMEPAGE: + * http://felix.apache.org/ diff --git a/modules/transport-netty4/licenses/netty-common-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 new file mode 100644 index 00000000000..0e775de8650 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-common-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 @@ -0,0 +1 @@ +8f2a9aafddf7a5db56e6c7dbbc94834e5bdeb186 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-LICENSE.txt 
b/modules/transport-netty4/licenses/netty-common-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-common-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/modules/transport-netty4/licenses/netty-common-NOTICE.txt b/modules/transport-netty4/licenses/netty-common-NOTICE.txt new file mode 100644 index 00000000000..5bbf91a14de --- /dev/null +++ b/modules/transport-netty4/licenses/netty-common-NOTICE.txt @@ -0,0 +1,116 @@ + + The Netty Project + ================= + +Please visit the Netty web site for more information: + + * http://netty.io/ + +Copyright 2011 The Netty Project + +The Netty Project licenses this file to you under the Apache License, +version 2.0 (the "License"); you may not use this file except in compliance +with the License. 
You may obtain a copy of the License at: + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations +under the License. + +Also, please refer to each LICENSE..txt file, which is located in +the 'license' directory of the distribution file, for the license terms of the +components that this product depends on. + +------------------------------------------------------------------------------- +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified version of 'JZlib', a re-implementation of +zlib in pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD Style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product contains a modified version of 'Webbit', a Java event based +WebSocket and HTTP server: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * http://code.google.com/p/protobuf/ + 
+This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * http://www.bouncycastle.org/ + +This product optionally depends on 'SLF4J', a simple logging facade for Java, +which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * http://www.slf4j.org/ + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * http://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * http://logging.apache.org/log4j/ + +This product optionally depends on 'JBoss Logging', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-logging.txt (GNU LGPL 2.1) + * HOMEPAGE: + * http://anonsvn.jboss.org/repos/common/common-logging-spi/ + +This product optionally depends on 'Apache Felix', an open source OSGi +framework implementation, which can be obtained at: + + * LICENSE: + * license/LICENSE.felix.txt (Apache License 2.0) + * HOMEPAGE: + * http://felix.apache.org/ diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 new file mode 100644 index 00000000000..e91cb888a44 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-handler-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 @@ -0,0 +1 @@ +99a2d3a7285195c7f42d25f236212b984d0dfcb7 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-LICENSE.txt 
b/modules/transport-netty4/licenses/netty-handler-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-handler-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/modules/transport-netty4/licenses/netty-handler-NOTICE.txt b/modules/transport-netty4/licenses/netty-handler-NOTICE.txt new file mode 100644 index 00000000000..5bbf91a14de --- /dev/null +++ b/modules/transport-netty4/licenses/netty-handler-NOTICE.txt @@ -0,0 +1,116 @@ + + The Netty Project + ================= + +Please visit the Netty web site for more information: + + * http://netty.io/ + +Copyright 2011 The Netty Project + +The Netty Project licenses this file to you under the Apache License, +version 2.0 (the "License"); you may not use this file except in compliance +with the License. 
You may obtain a copy of the License at: + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations +under the License. + +Also, please refer to each LICENSE..txt file, which is located in +the 'license' directory of the distribution file, for the license terms of the +components that this product depends on. + +------------------------------------------------------------------------------- +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified version of 'JZlib', a re-implementation of +zlib in pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD Style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product contains a modified version of 'Webbit', a Java event based +WebSocket and HTTP server: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * http://code.google.com/p/protobuf/ + 
+This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * http://www.bouncycastle.org/ + +This product optionally depends on 'SLF4J', a simple logging facade for Java, +which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * http://www.slf4j.org/ + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * http://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * http://logging.apache.org/log4j/ + +This product optionally depends on 'JBoss Logging', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-logging.txt (GNU LGPL 2.1) + * HOMEPAGE: + * http://anonsvn.jboss.org/repos/common/common-logging-spi/ + +This product optionally depends on 'Apache Felix', an open source OSGi +framework implementation, which can be obtained at: + + * LICENSE: + * license/LICENSE.felix.txt (Apache License 2.0) + * HOMEPAGE: + * http://felix.apache.org/ diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 new file mode 100644 index 00000000000..951fcbf0f80 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-resolver-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 @@ -0,0 +1 @@ +1f832049b94980a9f8d01ff87a83170c5a03f7eb \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-LICENSE.txt 
b/modules/transport-netty4/licenses/netty-resolver-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-resolver-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/modules/transport-netty4/licenses/netty-resolver-NOTICE.txt b/modules/transport-netty4/licenses/netty-resolver-NOTICE.txt new file mode 100644 index 00000000000..5bbf91a14de --- /dev/null +++ b/modules/transport-netty4/licenses/netty-resolver-NOTICE.txt @@ -0,0 +1,116 @@ + + The Netty Project + ================= + +Please visit the Netty web site for more information: + + * http://netty.io/ + +Copyright 2011 The Netty Project + +The Netty Project licenses this file to you under the Apache License, +version 2.0 (the "License"); you may not use this file except in compliance +with the License. 
You may obtain a copy of the License at: + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations +under the License. + +Also, please refer to each LICENSE..txt file, which is located in +the 'license' directory of the distribution file, for the license terms of the +components that this product depends on. + +------------------------------------------------------------------------------- +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified version of 'JZlib', a re-implementation of +zlib in pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD Style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product contains a modified version of 'Webbit', a Java event based +WebSocket and HTTP server: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * http://code.google.com/p/protobuf/ + 
+This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * http://www.bouncycastle.org/ + +This product optionally depends on 'SLF4J', a simple logging facade for Java, +which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * http://www.slf4j.org/ + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * http://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * http://logging.apache.org/log4j/ + +This product optionally depends on 'JBoss Logging', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-logging.txt (GNU LGPL 2.1) + * HOMEPAGE: + * http://anonsvn.jboss.org/repos/common/common-logging-spi/ + +This product optionally depends on 'Apache Felix', an open source OSGi +framework implementation, which can be obtained at: + + * LICENSE: + * license/LICENSE.felix.txt (Apache License 2.0) + * HOMEPAGE: + * http://felix.apache.org/ diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 new file mode 100644 index 00000000000..4b7743836ca --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-4.1.4.Final-elastic-SNAPSHOT.jar.sha1 @@ -0,0 +1 @@ +16a2ffc9a952a8c1deccca462e7f9965608db2b2 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-LICENSE.txt 
b/modules/transport-netty4/licenses/netty-transport-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/modules/transport-netty4/licenses/netty-transport-NOTICE.txt b/modules/transport-netty4/licenses/netty-transport-NOTICE.txt new file mode 100644 index 00000000000..5bbf91a14de --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-NOTICE.txt @@ -0,0 +1,116 @@ + + The Netty Project + ================= + +Please visit the Netty web site for more information: + + * http://netty.io/ + +Copyright 2011 The Netty Project + +The Netty Project licenses this file to you under the Apache License, +version 2.0 (the "License"); you may not use this file except in compliance +with the License. 
You may obtain a copy of the License at: + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations +under the License. + +Also, please refer to each LICENSE..txt file, which is located in +the 'license' directory of the distribution file, for the license terms of the +components that this product depends on. + +------------------------------------------------------------------------------- +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified version of 'JZlib', a re-implementation of +zlib in pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD Style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product contains a modified version of 'Webbit', a Java event based +WebSocket and HTTP server: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * http://code.google.com/p/protobuf/ + 
+This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * http://www.bouncycastle.org/ + +This product optionally depends on 'SLF4J', a simple logging facade for Java, +which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * http://www.slf4j.org/ + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * http://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * http://logging.apache.org/log4j/ + +This product optionally depends on 'JBoss Logging', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-logging.txt (GNU LGPL 2.1) + * HOMEPAGE: + * http://anonsvn.jboss.org/repos/common/common-logging-spi/ + +This product optionally depends on 'Apache Felix', an open source OSGi +framework implementation, which can be obtained at: + + * LICENSE: + * license/LICENSE.felix.txt (Apache License 2.0) + * HOMEPAGE: + * http://felix.apache.org/ diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java new file mode 100644 index 00000000000..62443dc541e --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java @@ -0,0 +1,268 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.http.netty4; + +import io.netty.buffer.ByteBuf; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFutureListener; +import io.netty.channel.ChannelPromise; +import io.netty.handler.codec.http.DefaultFullHttpResponse; +import io.netty.handler.codec.http.FullHttpRequest; +import io.netty.handler.codec.http.FullHttpResponse; +import io.netty.handler.codec.http.HttpHeaderNames; +import io.netty.handler.codec.http.HttpHeaderValues; +import io.netty.handler.codec.http.HttpHeaders; +import io.netty.handler.codec.http.HttpResponse; +import io.netty.handler.codec.http.HttpResponseStatus; +import io.netty.handler.codec.http.HttpVersion; +import io.netty.handler.codec.http.cookie.ServerCookieDecoder; +import io.netty.handler.codec.http.cookie.ServerCookieEncoder; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; +import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.http.netty4.cors.Netty4CorsHandler; +import org.elasticsearch.http.netty4.pipelining.HttpPipelinedRequest; +import 
org.elasticsearch.rest.AbstractRestChannel; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.transport.netty4.Netty4Utils; + +import java.util.Collections; +import java.util.EnumMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +final class Netty4HttpChannel extends AbstractRestChannel { + + private final Netty4HttpServerTransport transport; + private final Channel channel; + private final FullHttpRequest nettyRequest; + private final HttpPipelinedRequest pipelinedRequest; + private final ThreadContext threadContext; + + /** + * @param transport The corresponding NettyHttpServerTransport where this channel belongs to. + * @param request The request that is handled by this channel. + * @param pipelinedRequest If HTTP pipelining is enabled provide the corresponding pipelined request. May be null if + * HTTP pipelining is disabled. + * @param detailedErrorsEnabled true iff error messages should include stack traces. 
+ * @param threadContext the thread context for the channel + */ + Netty4HttpChannel( + final Netty4HttpServerTransport transport, + final Netty4HttpRequest request, + final HttpPipelinedRequest pipelinedRequest, + final boolean detailedErrorsEnabled, + final ThreadContext threadContext) { + super(request, detailedErrorsEnabled); + this.transport = transport; + this.channel = request.getChannel(); + this.nettyRequest = request.request(); + this.pipelinedRequest = pipelinedRequest; + this.threadContext = threadContext; + } + + @Override + public BytesStreamOutput newBytesOutput() { + return new ReleasableBytesStreamOutput(transport.bigArrays); + } + + + @Override + public void sendResponse(RestResponse response) { + // if the response object was created upstream, then use it; + // otherwise, create a new one + ByteBuf buffer = Netty4Utils.toByteBuf(response.content()); + FullHttpResponse resp = newResponse(buffer); + resp.setStatus(getStatus(response.status())); + + Netty4CorsHandler.setCorsResponseHeaders(nettyRequest, resp, transport.getCorsConfig()); + + String opaque = nettyRequest.headers().get("X-Opaque-Id"); + if (opaque != null) { + setHeaderField(resp, "X-Opaque-Id", opaque); + } + + // Add all custom headers + addCustomHeaders(resp, response.getHeaders()); + addCustomHeaders(resp, threadContext.getResponseHeaders()); + + BytesReference content = response.content(); + boolean release = content instanceof Releasable; + try { + // If our response doesn't specify a content-type header, set one + setHeaderField(resp, HttpHeaderNames.CONTENT_TYPE.toString(), response.contentType(), false); + // If our response has no content-length, calculate and set one + setHeaderField(resp, HttpHeaderNames.CONTENT_LENGTH.toString(), String.valueOf(buffer.readableBytes()), false); + + addCookies(resp); + + final ChannelPromise promise = channel.newPromise(); + + if (release) { + promise.addListener(f -> ((Releasable)content).close()); + release = false; + } + + if 
(isCloseConnection()) { + promise.addListener(ChannelFutureListener.CLOSE); + } + + if (pipelinedRequest != null) { + channel.writeAndFlush(pipelinedRequest.createHttpResponse(resp, promise)); + } else { + channel.writeAndFlush(resp, promise); + } + + } finally { + if (release) { + ((Releasable) content).close(); + } + if (pipelinedRequest != null) { + pipelinedRequest.release(); + } + } + } + + private void setHeaderField(HttpResponse resp, String headerField, String value) { + setHeaderField(resp, headerField, value, true); + } + + private void setHeaderField(HttpResponse resp, String headerField, String value, boolean override) { + if (override || !resp.headers().contains(headerField)) { + resp.headers().add(headerField, value); + } + } + + private void addCookies(HttpResponse resp) { + if (transport.resetCookies) { + String cookieString = nettyRequest.headers().get(HttpHeaders.Names.COOKIE); + if (cookieString != null) { + Set cookies = ServerCookieDecoder.STRICT.decode(cookieString); + if (!cookies.isEmpty()) { + // Reset the cookies if necessary. + resp.headers().set(HttpHeaderNames.SET_COOKIE, ServerCookieEncoder.STRICT.encode(cookies)); + } + } + } + } + + private void addCustomHeaders(HttpResponse response, Map> customHeaders) { + if (customHeaders != null) { + for (Map.Entry> headerEntry : customHeaders.entrySet()) { + for (String headerValue : headerEntry.getValue()) { + setHeaderField(response, headerEntry.getKey(), headerValue); + } + } + } + } + + // Determine if the request protocol version is HTTP 1.0 + private boolean isHttp10() { + return nettyRequest.protocolVersion().equals(HttpVersion.HTTP_1_0); + } + + // Determine if the request connection should be closed on completion. 
+ private boolean isCloseConnection() { + final boolean http10 = isHttp10(); + return HttpHeaderValues.CLOSE.equals(nettyRequest.headers().get(HttpHeaderNames.CONNECTION)) || + (http10 && HttpHeaderValues.KEEP_ALIVE.equals(nettyRequest.headers().get(HttpHeaderNames.CONNECTION)) == false); + } + + // Create a new {@link HttpResponse} to transmit the response for the netty request. + private FullHttpResponse newResponse(ByteBuf buffer) { + final boolean http10 = isHttp10(); + final boolean close = isCloseConnection(); + // Build the response object. + final HttpResponseStatus status = HttpResponseStatus.OK; // default to initialize + final FullHttpResponse response; + if (http10) { + response = new DefaultFullHttpResponse(HttpVersion.HTTP_1_0, status, buffer); + if (!close) { + response.headers().add(HttpHeaderNames.CONNECTION, "Keep-Alive"); + } + } else { + response = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, status, buffer); + } + return response; + } + + private static final HttpResponseStatus TOO_MANY_REQUESTS = new HttpResponseStatus(429, "Too Many Requests"); + + private static Map MAP; + + static { + EnumMap map = new EnumMap<>(RestStatus.class); + map.put(RestStatus.CONTINUE, HttpResponseStatus.CONTINUE); + map.put(RestStatus.SWITCHING_PROTOCOLS, HttpResponseStatus.SWITCHING_PROTOCOLS); + map.put(RestStatus.OK, HttpResponseStatus.OK); + map.put(RestStatus.CREATED, HttpResponseStatus.CREATED); + map.put(RestStatus.ACCEPTED, HttpResponseStatus.ACCEPTED); + map.put(RestStatus.NON_AUTHORITATIVE_INFORMATION, HttpResponseStatus.NON_AUTHORITATIVE_INFORMATION); + map.put(RestStatus.NO_CONTENT, HttpResponseStatus.NO_CONTENT); + map.put(RestStatus.RESET_CONTENT, HttpResponseStatus.RESET_CONTENT); + map.put(RestStatus.PARTIAL_CONTENT, HttpResponseStatus.PARTIAL_CONTENT); + map.put(RestStatus.MULTI_STATUS, HttpResponseStatus.INTERNAL_SERVER_ERROR); // no status for this?? 
+ map.put(RestStatus.MULTIPLE_CHOICES, HttpResponseStatus.MULTIPLE_CHOICES); + map.put(RestStatus.MOVED_PERMANENTLY, HttpResponseStatus.MOVED_PERMANENTLY); + map.put(RestStatus.FOUND, HttpResponseStatus.FOUND); + map.put(RestStatus.SEE_OTHER, HttpResponseStatus.SEE_OTHER); + map.put(RestStatus.NOT_MODIFIED, HttpResponseStatus.NOT_MODIFIED); + map.put(RestStatus.USE_PROXY, HttpResponseStatus.USE_PROXY); + map.put(RestStatus.TEMPORARY_REDIRECT, HttpResponseStatus.TEMPORARY_REDIRECT); + map.put(RestStatus.BAD_REQUEST, HttpResponseStatus.BAD_REQUEST); + map.put(RestStatus.UNAUTHORIZED, HttpResponseStatus.UNAUTHORIZED); + map.put(RestStatus.PAYMENT_REQUIRED, HttpResponseStatus.PAYMENT_REQUIRED); + map.put(RestStatus.FORBIDDEN, HttpResponseStatus.FORBIDDEN); + map.put(RestStatus.NOT_FOUND, HttpResponseStatus.NOT_FOUND); + map.put(RestStatus.METHOD_NOT_ALLOWED, HttpResponseStatus.METHOD_NOT_ALLOWED); + map.put(RestStatus.NOT_ACCEPTABLE, HttpResponseStatus.NOT_ACCEPTABLE); + map.put(RestStatus.PROXY_AUTHENTICATION, HttpResponseStatus.PROXY_AUTHENTICATION_REQUIRED); + map.put(RestStatus.REQUEST_TIMEOUT, HttpResponseStatus.REQUEST_TIMEOUT); + map.put(RestStatus.CONFLICT, HttpResponseStatus.CONFLICT); + map.put(RestStatus.GONE, HttpResponseStatus.GONE); + map.put(RestStatus.LENGTH_REQUIRED, HttpResponseStatus.LENGTH_REQUIRED); + map.put(RestStatus.PRECONDITION_FAILED, HttpResponseStatus.PRECONDITION_FAILED); + map.put(RestStatus.REQUEST_ENTITY_TOO_LARGE, HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE); + map.put(RestStatus.REQUEST_URI_TOO_LONG, HttpResponseStatus.REQUEST_URI_TOO_LONG); + map.put(RestStatus.UNSUPPORTED_MEDIA_TYPE, HttpResponseStatus.UNSUPPORTED_MEDIA_TYPE); + map.put(RestStatus.REQUESTED_RANGE_NOT_SATISFIED, HttpResponseStatus.REQUESTED_RANGE_NOT_SATISFIABLE); + map.put(RestStatus.EXPECTATION_FAILED, HttpResponseStatus.EXPECTATION_FAILED); + map.put(RestStatus.UNPROCESSABLE_ENTITY, HttpResponseStatus.BAD_REQUEST); + map.put(RestStatus.LOCKED, 
HttpResponseStatus.BAD_REQUEST); + map.put(RestStatus.FAILED_DEPENDENCY, HttpResponseStatus.BAD_REQUEST); + map.put(RestStatus.TOO_MANY_REQUESTS, TOO_MANY_REQUESTS); + map.put(RestStatus.INTERNAL_SERVER_ERROR, HttpResponseStatus.INTERNAL_SERVER_ERROR); + map.put(RestStatus.NOT_IMPLEMENTED, HttpResponseStatus.NOT_IMPLEMENTED); + map.put(RestStatus.BAD_GATEWAY, HttpResponseStatus.BAD_GATEWAY); + map.put(RestStatus.SERVICE_UNAVAILABLE, HttpResponseStatus.SERVICE_UNAVAILABLE); + map.put(RestStatus.GATEWAY_TIMEOUT, HttpResponseStatus.GATEWAY_TIMEOUT); + map.put(RestStatus.HTTP_VERSION_NOT_SUPPORTED, HttpResponseStatus.HTTP_VERSION_NOT_SUPPORTED); + MAP = Collections.unmodifiableMap(map); + } + + private static HttpResponseStatus getStatus(RestStatus status) { + return MAP.getOrDefault(status, HttpResponseStatus.INTERNAL_SERVER_ERROR); + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java new file mode 100644 index 00000000000..949faa848e7 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java @@ -0,0 +1,134 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.http.netty4; + +import io.netty.handler.codec.http.FullHttpRequest; +import io.netty.handler.codec.http.HttpRequest; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.transport.netty4.Netty4Utils; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.support.RestUtils; +import io.netty.channel.Channel; +import io.netty.handler.codec.http.HttpMethod; + +import java.net.SocketAddress; +import java.util.HashMap; +import java.util.Map; + +class Netty4HttpRequest extends RestRequest { + + private final FullHttpRequest request; + private final Channel channel; + private final BytesReference content; + + Netty4HttpRequest(FullHttpRequest request, Channel channel) { + super(request.uri()); + this.request = request; + this.channel = channel; + if (request.content().isReadable()) { + this.content = Netty4Utils.toBytesReference(request.content()); + } else { + this.content = BytesArray.EMPTY; + } + } + + public FullHttpRequest request() { + return this.request; + } + + @Override + public Method method() { + HttpMethod httpMethod = request.method(); + if (httpMethod == HttpMethod.GET) + return Method.GET; + + if (httpMethod == HttpMethod.POST) + return Method.POST; + + if (httpMethod == HttpMethod.PUT) + return Method.PUT; + + if (httpMethod == HttpMethod.DELETE) + return Method.DELETE; + + if (httpMethod == HttpMethod.HEAD) { + return Method.HEAD; + } + + if (httpMethod == HttpMethod.OPTIONS) { + return Method.OPTIONS; + } + + return Method.GET; + } + + @Override + public String uri() { + return request.uri(); + } + + @Override + public boolean hasContent() { + return content.length() > 0; + } + + @Override + public BytesReference content() { + return content; + } + + /** + * Returns the remote address where this rest request channel is 
"connected to". The + * returned {@link SocketAddress} is supposed to be down-cast into more + * concrete type such as {@link java.net.InetSocketAddress} to retrieve + * the detailed information. + */ + @Override + public SocketAddress getRemoteAddress() { + return channel.remoteAddress(); + } + + /** + * Returns the local address where this request channel is bound to. The returned + * {@link SocketAddress} is supposed to be down-cast into more concrete + * type such as {@link java.net.InetSocketAddress} to retrieve the detailed + * information. + */ + @Override + public SocketAddress getLocalAddress() { + return channel.localAddress(); + } + + public Channel getChannel() { + return channel; + } + + @Override + public String header(String name) { + return request.headers().get(name); + } + + @Override + public Iterable> headers() { + return request.headers().entries(); + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java new file mode 100644 index 00000000000..a4402d4816f --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.http.netty4; + +import io.netty.buffer.Unpooled; +import io.netty.channel.ChannelHandler; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; +import io.netty.handler.codec.http.DefaultFullHttpRequest; +import io.netty.handler.codec.http.FullHttpRequest; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.http.netty4.pipelining.HttpPipelinedRequest; + +@ChannelHandler.Sharable +class Netty4HttpRequestHandler extends SimpleChannelInboundHandler { + + private final Netty4HttpServerTransport serverTransport; + private final boolean httpPipeliningEnabled; + private final boolean detailedErrorsEnabled; + private final ThreadContext threadContext; + + Netty4HttpRequestHandler(Netty4HttpServerTransport serverTransport, boolean detailedErrorsEnabled, ThreadContext threadContext) { + this.serverTransport = serverTransport; + this.httpPipeliningEnabled = serverTransport.pipelining; + this.detailedErrorsEnabled = detailedErrorsEnabled; + this.threadContext = threadContext; + } + + @Override + protected void channelRead0(ChannelHandlerContext ctx, Object msg) throws Exception { + final FullHttpRequest request; + final HttpPipelinedRequest pipelinedRequest; + if (this.httpPipeliningEnabled && msg instanceof HttpPipelinedRequest) { + pipelinedRequest = (HttpPipelinedRequest) msg; + request = (FullHttpRequest) pipelinedRequest.last(); + } else { + pipelinedRequest = null; + request = (FullHttpRequest) msg; + } + + final FullHttpRequest copy = + new DefaultFullHttpRequest( + request.protocolVersion(), + request.method(), + request.uri(), + Unpooled.copiedBuffer(request.content()), + request.headers(), + request.trailingHeaders()); + + final Netty4HttpRequest httpRequest = new Netty4HttpRequest(copy, ctx.channel()); + serverTransport.dispatchRequest( + 
httpRequest, + new Netty4HttpChannel(serverTransport, httpRequest, pipelinedRequest, detailedErrorsEnabled, threadContext)); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + serverTransport.exceptionCaught(ctx, cause); + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java new file mode 100644 index 00000000000..910a499a194 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -0,0 +1,571 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.http.netty4; + +import com.carrotsearch.hppc.IntHashSet; +import com.carrotsearch.hppc.IntSet; +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.AdaptiveRecvByteBufAllocator; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelHandler; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.FixedRecvByteBufAllocator; +import io.netty.channel.RecvByteBufAllocator; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.oio.OioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.channel.socket.oio.OioServerSocketChannel; +import io.netty.handler.codec.ByteToMessageDecoder; +import io.netty.handler.codec.http.HttpContentCompressor; +import io.netty.handler.codec.http.HttpContentDecompressor; +import io.netty.handler.codec.http.HttpMethod; +import io.netty.handler.codec.http.HttpObjectAggregator; +import io.netty.handler.codec.http.HttpRequestDecoder; +import io.netty.handler.codec.http.HttpResponseEncoder; +import io.netty.handler.timeout.ReadTimeoutException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.BoundTransportAddress; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.transport.NetworkExceptionHelper; +import org.elasticsearch.common.transport.PortsRange; +import 
org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.http.BindHttpException; +import org.elasticsearch.http.HttpInfo; +import org.elasticsearch.http.HttpServerAdapter; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.http.HttpStats; +import org.elasticsearch.http.netty4.cors.Netty4CorsConfig; +import org.elasticsearch.http.netty4.cors.Netty4CorsConfigBuilder; +import org.elasticsearch.http.netty4.cors.Netty4CorsHandler; +import org.elasticsearch.http.netty4.pipelining.HttpPipeliningHandler; +import org.elasticsearch.monitor.jvm.JvmInfo; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.support.RestUtils; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.BindTransportException; +import org.elasticsearch.transport.netty4.Netty4OpenChannelsHandler; +import org.elasticsearch.transport.netty4.Netty4Utils; + +import java.io.IOException; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.regex.Pattern; + +import static org.elasticsearch.common.settings.Setting.boolSetting; +import static org.elasticsearch.common.settings.Setting.byteSizeSetting; +import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_HEADERS; +import static 
org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_METHODS; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_ORIGIN; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ENABLED; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_MAX_AGE; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_BIND_HOST; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_COMPRESSION; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_COMPRESSION_LEVEL; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_DETAILED_ERRORS_ENABLED; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_CHUNK_SIZE; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_HEADER_SIZE; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_INITIAL_LINE_LENGTH; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_PORT; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_PUBLISH_HOST; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_PUBLISH_PORT; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_RESET_COOKIES; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_PIPELINING; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_PIPELINING_MAX_EVENTS; +import static org.elasticsearch.http.netty4.cors.Netty4CorsHandler.ANY_ORIGIN; + +public class Netty4HttpServerTransport extends AbstractLifecycleComponent implements HttpServerTransport { + + static { + Netty4Utils.setup(); + } + + public static Setting SETTING_HTTP_NETTY_MAX_CUMULATION_BUFFER_CAPACITY = + Setting.byteSizeSetting("http.netty.max_cumulation_buffer_capacity", new ByteSizeValue(-1), + Property.NodeScope, 
Property.Shared); + public static Setting SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = + Setting.intSetting("http.netty.max_composite_buffer_components", -1, Property.NodeScope, Property.Shared); + + public static final Setting SETTING_HTTP_WORKER_COUNT = new Setting<>("http.netty.worker_count", + (s) -> Integer.toString(EsExecutors.boundedNumberOfProcessors(s) * 2), + (s) -> Setting.parseInt(s, 1, "http.netty.worker_count"), Property.NodeScope, Property.Shared); + + public static final Setting SETTING_HTTP_TCP_NO_DELAY = + boolSetting("http.tcp_no_delay", NetworkService.TcpSettings.TCP_NO_DELAY, Property.NodeScope, Property.Shared); + public static final Setting SETTING_HTTP_TCP_KEEP_ALIVE = + boolSetting("http.tcp.keep_alive", NetworkService.TcpSettings.TCP_KEEP_ALIVE, Property.NodeScope, Property.Shared); + public static final Setting SETTING_HTTP_TCP_BLOCKING_SERVER = + boolSetting("http.tcp.blocking_server", NetworkService.TcpSettings.TCP_BLOCKING_SERVER, Property.NodeScope, Property.Shared); + public static final Setting SETTING_HTTP_TCP_REUSE_ADDRESS = + boolSetting("http.tcp.reuse_address", NetworkService.TcpSettings.TCP_REUSE_ADDRESS, Property.NodeScope, Property.Shared); + + public static final Setting SETTING_HTTP_TCP_SEND_BUFFER_SIZE = + Setting.byteSizeSetting("http.tcp.send_buffer_size", NetworkService.TcpSettings.TCP_SEND_BUFFER_SIZE, + Property.NodeScope, Property.Shared); + public static final Setting SETTING_HTTP_TCP_RECEIVE_BUFFER_SIZE = + Setting.byteSizeSetting("http.tcp.receive_buffer_size", NetworkService.TcpSettings.TCP_RECEIVE_BUFFER_SIZE, + Property.NodeScope, Property.Shared); + public static final Setting SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE = + Setting.byteSizeSetting("transport.netty.receive_predictor_size", + settings -> { + long defaultReceiverPredictor = 512 * 1024; + if (JvmInfo.jvmInfo().getMem().getDirectMemoryMax().bytes() > 0) { + // we can guess a better default... 
+ long l = (long) ((0.3 * JvmInfo.jvmInfo().getMem().getDirectMemoryMax().bytes()) / SETTING_HTTP_WORKER_COUNT.get + (settings)); + defaultReceiverPredictor = Math.min(defaultReceiverPredictor, Math.max(l, 64 * 1024)); + } + return new ByteSizeValue(defaultReceiverPredictor).toString(); + }, Property.NodeScope); + public static final Setting SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MIN = + byteSizeSetting("http.netty.receive_predictor_min", SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE, Property.NodeScope); + public static final Setting SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MAX = + byteSizeSetting("http.netty.receive_predictor_max", SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE, Property.NodeScope); + + + protected final NetworkService networkService; + protected final BigArrays bigArrays; + + protected final ByteSizeValue maxContentLength; + protected final ByteSizeValue maxInitialLineLength; + protected final ByteSizeValue maxHeaderSize; + protected final ByteSizeValue maxChunkSize; + + protected final int workerCount; + + protected final boolean blockingServer; + + protected final boolean pipelining; + + protected final int pipeliningMaxEvents; + + protected final boolean compression; + + protected final int compressionLevel; + + protected final boolean resetCookies; + + protected final PortsRange port; + + protected final String bindHosts[]; + + protected final String publishHosts[]; + + protected final boolean detailedErrorsEnabled; + protected final ThreadPool threadPool; + + protected final boolean tcpNoDelay; + protected final boolean tcpKeepAlive; + protected final boolean reuseAddress; + + protected final ByteSizeValue tcpSendBufferSize; + protected final ByteSizeValue tcpReceiveBufferSize; + protected final RecvByteBufAllocator recvByteBufAllocator; + + protected final ByteSizeValue maxCumulationBufferCapacity; + protected final int maxCompositeBufferComponents; + + protected volatile ServerBootstrap serverBootstrap; + + protected volatile BoundTransportAddress 
boundAddress; + + protected final List serverChannels = new ArrayList<>(); + + // package private for testing + Netty4OpenChannelsHandler serverOpenChannels; + + protected volatile HttpServerAdapter httpServerAdapter; + + private final Netty4CorsConfig corsConfig; + + @Inject + public Netty4HttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, ThreadPool threadPool) { + super(settings); + this.networkService = networkService; + this.bigArrays = bigArrays; + this.threadPool = threadPool; + + ByteSizeValue maxContentLength = SETTING_HTTP_MAX_CONTENT_LENGTH.get(settings); + this.maxChunkSize = SETTING_HTTP_MAX_CHUNK_SIZE.get(settings); + this.maxHeaderSize = SETTING_HTTP_MAX_HEADER_SIZE.get(settings); + this.maxInitialLineLength = SETTING_HTTP_MAX_INITIAL_LINE_LENGTH.get(settings); + this.resetCookies = SETTING_HTTP_RESET_COOKIES.get(settings); + this.maxCumulationBufferCapacity = SETTING_HTTP_NETTY_MAX_CUMULATION_BUFFER_CAPACITY.get(settings); + this.maxCompositeBufferComponents = SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS.get(settings); + this.workerCount = SETTING_HTTP_WORKER_COUNT.get(settings); + this.blockingServer = SETTING_HTTP_TCP_BLOCKING_SERVER.get(settings); + this.port = SETTING_HTTP_PORT.get(settings); + this.bindHosts = SETTING_HTTP_BIND_HOST.get(settings).toArray(Strings.EMPTY_ARRAY); + this.publishHosts = SETTING_HTTP_PUBLISH_HOST.get(settings).toArray(Strings.EMPTY_ARRAY); + this.tcpNoDelay = SETTING_HTTP_TCP_NO_DELAY.get(settings); + this.tcpKeepAlive = SETTING_HTTP_TCP_KEEP_ALIVE.get(settings); + this.reuseAddress = SETTING_HTTP_TCP_REUSE_ADDRESS.get(settings); + this.tcpSendBufferSize = SETTING_HTTP_TCP_SEND_BUFFER_SIZE.get(settings); + this.tcpReceiveBufferSize = SETTING_HTTP_TCP_RECEIVE_BUFFER_SIZE.get(settings); + this.detailedErrorsEnabled = SETTING_HTTP_DETAILED_ERRORS_ENABLED.get(settings); + + // See AdaptiveReceiveBufferSizePredictor#DEFAULT_XXX for default values in netty..., we can use higher 
ones for us, even fixed one + ByteSizeValue receivePredictorMin = SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MIN.get(settings); + ByteSizeValue receivePredictorMax = SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MAX.get(settings); + if (receivePredictorMax.bytes() == receivePredictorMin.bytes()) { + recvByteBufAllocator = new FixedRecvByteBufAllocator(Math.toIntExact(receivePredictorMax.bytes())); + } else { + recvByteBufAllocator = new AdaptiveRecvByteBufAllocator( + Math.toIntExact(receivePredictorMin.bytes()), + Math.toIntExact(receivePredictorMin.bytes()), + Math.toIntExact(receivePredictorMax.bytes())); + } + + this.compression = SETTING_HTTP_COMPRESSION.get(settings); + this.compressionLevel = SETTING_HTTP_COMPRESSION_LEVEL.get(settings); + this.pipelining = SETTING_PIPELINING.get(settings); + this.pipeliningMaxEvents = SETTING_PIPELINING_MAX_EVENTS.get(settings); + this.corsConfig = buildCorsConfig(settings); + + // validate max content length + if (maxContentLength.bytes() > Integer.MAX_VALUE) { + logger.warn("maxContentLength[{}] set to high value, resetting it to [100mb]", maxContentLength); + maxContentLength = new ByteSizeValue(100, ByteSizeUnit.MB); + } + this.maxContentLength = maxContentLength; + + logger.debug("using max_chunk_size[{}], max_header_size[{}], max_initial_line_length[{}], max_content_length[{}], " + + "receive_predictor[{}->{}], pipelining[{}], pipelining_max_events[{}]", + maxChunkSize, maxHeaderSize, maxInitialLineLength, this.maxContentLength, + receivePredictorMin, receivePredictorMax, pipelining, pipeliningMaxEvents); + } + + public Settings settings() { + return this.settings; + } + + @Override + public void httpServerAdapter(HttpServerAdapter httpServerAdapter) { + this.httpServerAdapter = httpServerAdapter; + } + + @Override + protected void doStart() { + this.serverOpenChannels = new Netty4OpenChannelsHandler(logger); + + serverBootstrap = new ServerBootstrap(); + if (blockingServer) { + serverBootstrap.group(new 
OioEventLoopGroup(workerCount, daemonThreadFactory(settings, "http_server_worker"))); + serverBootstrap.channel(OioServerSocketChannel.class); + } else { + serverBootstrap.group(new NioEventLoopGroup(workerCount, daemonThreadFactory(settings, "http_server_worker"))); + serverBootstrap.channel(NioServerSocketChannel.class); + } + + serverBootstrap.childHandler(configureServerChannelHandler()); + + serverBootstrap.childOption(ChannelOption.TCP_NODELAY, SETTING_HTTP_TCP_NO_DELAY.get(settings)); + serverBootstrap.childOption(ChannelOption.SO_KEEPALIVE, SETTING_HTTP_TCP_KEEP_ALIVE.get(settings)); + + final ByteSizeValue tcpSendBufferSize = SETTING_HTTP_TCP_SEND_BUFFER_SIZE.get(settings); + if (tcpSendBufferSize.bytes() > 0) { + serverBootstrap.childOption(ChannelOption.SO_SNDBUF, Math.toIntExact(tcpSendBufferSize.bytes())); + } + + final ByteSizeValue tcpReceiveBufferSize = SETTING_HTTP_TCP_RECEIVE_BUFFER_SIZE.get(settings); + if (tcpReceiveBufferSize.bytes() > 0) { + serverBootstrap.childOption(ChannelOption.SO_RCVBUF, Math.toIntExact(tcpReceiveBufferSize.bytes())); + } + + serverBootstrap.option(ChannelOption.RCVBUF_ALLOCATOR, recvByteBufAllocator); + serverBootstrap.childOption(ChannelOption.RCVBUF_ALLOCATOR, recvByteBufAllocator); + + final boolean reuseAddress = SETTING_HTTP_TCP_REUSE_ADDRESS.get(settings); + serverBootstrap.option(ChannelOption.SO_REUSEADDR, reuseAddress); + serverBootstrap.childOption(ChannelOption.SO_REUSEADDR, reuseAddress); + + this.boundAddress = createBoundHttpAddress(); + } + + private BoundTransportAddress createBoundHttpAddress() { + // Bind and start to accept incoming connections. 
+ InetAddress hostAddresses[]; + try { + hostAddresses = networkService.resolveBindHostAddresses(bindHosts); + } catch (IOException e) { + throw new BindHttpException("Failed to resolve host [" + Arrays.toString(bindHosts) + "]", e); + } + + List boundAddresses = new ArrayList<>(hostAddresses.length); + for (InetAddress address : hostAddresses) { + boundAddresses.add(bindAddress(address)); + } + + final InetAddress publishInetAddress; + try { + publishInetAddress = networkService.resolvePublishHostAddresses(publishHosts); + } catch (Exception e) { + throw new BindTransportException("Failed to resolve publish address", e); + } + + final int publishPort = resolvePublishPort(settings, boundAddresses, publishInetAddress); + final InetSocketAddress publishAddress = new InetSocketAddress(publishInetAddress, publishPort); + return new BoundTransportAddress(boundAddresses.toArray(new TransportAddress[0]), new InetSocketTransportAddress(publishAddress)); + } + + // package private for tests + static int resolvePublishPort(Settings settings, List boundAddresses, InetAddress publishInetAddress) { + int publishPort = SETTING_HTTP_PUBLISH_PORT.get(settings); + + if (publishPort < 0) { + for (InetSocketTransportAddress boundAddress : boundAddresses) { + InetAddress boundInetAddress = boundAddress.address().getAddress(); + if (boundInetAddress.isAnyLocalAddress() || boundInetAddress.equals(publishInetAddress)) { + publishPort = boundAddress.getPort(); + break; + } + } + } + + // if no matching boundAddress found, check if there is a unique port for all bound addresses + if (publishPort < 0) { + final IntSet ports = new IntHashSet(); + for (InetSocketTransportAddress boundAddress : boundAddresses) { + ports.add(boundAddress.getPort()); + } + if (ports.size() == 1) { + publishPort = ports.iterator().next().value; + } + } + + if (publishPort < 0) { + throw new BindHttpException("Failed to auto-resolve http publish port, multiple bound addresses " + boundAddresses + + " with distinct 
ports and none of them matched the publish address (" + publishInetAddress + "). " + + "Please specify a unique port by setting " + SETTING_HTTP_PORT.getKey() + " or " + SETTING_HTTP_PUBLISH_PORT.getKey()); + } + return publishPort; + } + + private Netty4CorsConfig buildCorsConfig(Settings settings) { + if (SETTING_CORS_ENABLED.get(settings) == false) { + return Netty4CorsConfigBuilder.forOrigins().disable().build(); + } + String origin = SETTING_CORS_ALLOW_ORIGIN.get(settings); + final Netty4CorsConfigBuilder builder; + if (Strings.isNullOrEmpty(origin)) { + builder = Netty4CorsConfigBuilder.forOrigins(); + } else if (origin.equals(ANY_ORIGIN)) { + builder = Netty4CorsConfigBuilder.forAnyOrigin(); + } else { + Pattern p = RestUtils.checkCorsSettingForRegex(origin); + if (p == null) { + builder = Netty4CorsConfigBuilder.forOrigins(RestUtils.corsSettingAsArray(origin)); + } else { + builder = Netty4CorsConfigBuilder.forPattern(p); + } + } + if (SETTING_CORS_ALLOW_CREDENTIALS.get(settings)) { + builder.allowCredentials(); + } + String[] strMethods = settings.getAsArray(SETTING_CORS_ALLOW_METHODS.getKey()); + HttpMethod[] methods = Arrays.asList(strMethods) + .stream() + .map(HttpMethod::valueOf) + .toArray(size -> new HttpMethod[size]); + return builder.allowedRequestMethods(methods) + .maxAge(SETTING_CORS_MAX_AGE.get(settings)) + .allowedRequestHeaders(settings.getAsArray(SETTING_CORS_ALLOW_HEADERS.getKey())) + .shortCircuit() + .build(); + } + + private InetSocketTransportAddress bindAddress(final InetAddress hostAddress) { + final AtomicReference lastException = new AtomicReference<>(); + final AtomicReference boundSocket = new AtomicReference<>(); + boolean success = port.iterate(new PortsRange.PortCallback() { + @Override + public boolean onPortNumber(int portNumber) { + try { + synchronized (serverChannels) { + ChannelFuture future = serverBootstrap.bind(new InetSocketAddress(hostAddress, portNumber)).sync(); + serverChannels.add(future.channel()); + 
boundSocket.set((InetSocketAddress) future.channel().localAddress()); + } + } catch (Exception e) { + lastException.set(e); + return false; + } + return true; + } + }); + if (!success) { + throw new BindHttpException("Failed to bind to [" + port + "]", lastException.get()); + } + + if (logger.isDebugEnabled()) { + logger.debug("Bound http to address {{}}", NetworkAddress.format(boundSocket.get())); + } + return new InetSocketTransportAddress(boundSocket.get()); + } + + @Override + protected void doStop() { + synchronized (serverChannels) { + if (!serverChannels.isEmpty()) { + try { + Netty4Utils.closeChannels(serverChannels); + } catch (IOException e) { + logger.trace("exception while closing channels", e); + } + serverChannels.clear(); + } + } + + if (serverOpenChannels != null) { + serverOpenChannels.close(); + serverOpenChannels = null; + } + + if (serverBootstrap != null) { + serverBootstrap.config().group().shutdownGracefully(0, 5, TimeUnit.SECONDS).awaitUninterruptibly(); + serverBootstrap = null; + } + } + + @Override + protected void doClose() { + } + + @Override + public BoundTransportAddress boundAddress() { + return this.boundAddress; + } + + @Override + public HttpInfo info() { + BoundTransportAddress boundTransportAddress = boundAddress(); + if (boundTransportAddress == null) { + return null; + } + return new HttpInfo(boundTransportAddress, maxContentLength.bytes()); + } + + @Override + public HttpStats stats() { + Netty4OpenChannelsHandler channels = serverOpenChannels; + return new HttpStats(channels == null ? 0 : channels.numberOfOpenChannels(), channels == null ? 
0 : channels.totalChannels()); + } + + public Netty4CorsConfig getCorsConfig() { + return corsConfig; + } + + protected void dispatchRequest(RestRequest request, RestChannel channel) { + httpServerAdapter.dispatchRequest(request, channel, threadPool.getThreadContext()); + } + + protected void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + if (cause instanceof ReadTimeoutException) { + if (logger.isTraceEnabled()) { + logger.trace("Connection timeout [{}]", ctx.channel().remoteAddress()); + } + ctx.channel().close(); + } else { + if (!lifecycle.started()) { + // ignore + return; + } + if (!NetworkExceptionHelper.isCloseConnectionException(cause)) { + logger.warn("caught exception while handling client http traffic, closing connection {}", cause, ctx.channel()); + ctx.channel().close(); + } else { + logger.debug("caught exception while handling client http traffic, closing connection {}", cause, ctx.channel()); + ctx.channel().close(); + } + } + } + + public ChannelHandler configureServerChannelHandler() { + return new HttpChannelHandler(this, detailedErrorsEnabled, threadPool.getThreadContext()); + } + + static class HttpChannelHandler extends ChannelInitializer { + + private final Netty4HttpServerTransport transport; + private final Netty4HttpRequestHandler requestHandler; + + HttpChannelHandler( + final Netty4HttpServerTransport transport, + final boolean detailedErrorsEnabled, + final ThreadContext threadContext) { + this.transport = transport; + this.requestHandler = new Netty4HttpRequestHandler(transport, detailedErrorsEnabled, threadContext); + } + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + ch.pipeline().addLast("openChannels", transport.serverOpenChannels); + final HttpRequestDecoder decoder = new HttpRequestDecoder( + Math.toIntExact(transport.maxInitialLineLength.bytes()), + Math.toIntExact(transport.maxHeaderSize.bytes()), + Math.toIntExact(transport.maxChunkSize.bytes())); + 
decoder.setCumulator(ByteToMessageDecoder.COMPOSITE_CUMULATOR); + ch.pipeline().addLast("decoder", decoder); + ch.pipeline().addLast("decoder_compress", new HttpContentDecompressor()); + final HttpObjectAggregator aggregator = new HttpObjectAggregator(Math.toIntExact(transport.maxContentLength.bytes())); + if (transport.maxCompositeBufferComponents != -1) { + aggregator.setMaxCumulationBufferComponents(transport.maxCompositeBufferComponents); + } + ch.pipeline().addLast("aggregator", aggregator); + ch.pipeline().addLast("encoder", new HttpResponseEncoder()); + if (transport.compression) { + ch.pipeline().addLast("encoder_compress", new HttpContentCompressor(transport.compressionLevel)); + } + if (SETTING_CORS_ENABLED.get(transport.settings())) { + ch.pipeline().addLast("cors", new Netty4CorsHandler(transport.getCorsConfig())); + } + if (transport.pipelining) { + ch.pipeline().addLast("pipelining", new HttpPipeliningHandler(transport.pipeliningMaxEvents)); + } + ch.pipeline().addLast("handler", requestHandler); + } + + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsConfig.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsConfig.java new file mode 100644 index 00000000000..9c81c07e663 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsConfig.java @@ -0,0 +1,235 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.http.netty4.cors;

import io.netty.handler.codec.http.DefaultHttpHeaders;
import io.netty.handler.codec.http.EmptyHttpHeaders;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpMethod;

import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.regex.Pattern;

/**
 * Configuration for Cross-Origin Resource Sharing (CORS).
 *
 * This class was lifted from the Netty project:
 * https://github.com/netty/netty
 */
public final class Netty4CorsConfig {

    // NOTE(review): the generic type arguments below were stripped by the patch-text
    // extraction; they are reconstructed from the upstream Netty CorsConfig this class
    // was lifted from — confirm against the original commit.
    private final Optional<Set<String>> origins;
    private final Optional<Pattern> pattern;
    private final boolean anyOrigin;
    private final boolean enabled;
    private final boolean allowCredentials;
    private final long maxAge;
    private final Set<HttpMethod> allowedRequestMethods;
    private final Set<String> allowedRequestHeaders;
    private final boolean allowNullOrigin;
    private final Map<CharSequence, Callable<?>> preflightHeaders;
    private final boolean shortCircuit;

    Netty4CorsConfig(final Netty4CorsConfigBuilder builder) {
        // Defensively copy the origin set so later builder mutation cannot leak in.
        origins = builder.origins.map(s -> new LinkedHashSet<>(s));
        pattern = builder.pattern;
        anyOrigin = builder.anyOrigin;
        enabled = builder.enabled;
        allowCredentials = builder.allowCredentials;
        maxAge = builder.maxAge;
        allowedRequestMethods = builder.requestMethods;
        allowedRequestHeaders = builder.requestHeaders;
        allowNullOrigin = builder.allowNullOrigin;
        preflightHeaders = builder.preflightHeaders;
        shortCircuit = builder.shortCircuit;
    }

    /**
     * Determines if support for CORS is enabled.
     *
     * @return {@code true} if support for CORS is enabled, false otherwise.
     */
    public boolean isCorsSupportEnabled() {
        return enabled;
    }

    /**
     * Determines whether a wildcard origin, '*', is supported.
     *
     * @return {@code boolean} true if any origin is allowed.
     */
    public boolean isAnyOriginSupported() {
        return anyOrigin;
    }

    /**
     * Returns the set of allowed origins.
     *
     * @return {@code Optional<Set<String>>} the allowed origins.
     */
    public Optional<Set<String>> origins() {
        return origins;
    }

    /**
     * Returns whether the input origin is allowed by this configuration.
     *
     * @return {@code true} if the origin is allowed, otherwise {@code false}
     */
    public boolean isOriginAllowed(final String origin) {
        if (origins.isPresent()) {
            return origins.get().contains(origin);
        } else if (pattern.isPresent()) {
            return pattern.get().matcher(origin).matches();
        }
        // Neither an explicit origin set nor a pattern configured: nothing is allowed.
        return false;
    }

    /**
     * Web browsers may set the 'Origin' request header to 'null' if a resource is loaded
     * from the local file system.
     *
     * If isNullOriginAllowed is true then the server will respond with the wildcard for
     * the CORS response header 'Access-Control-Allow-Origin'.
     *
     * @return {@code true} if a 'null' origin should be supported.
     */
    public boolean isNullOriginAllowed() {
        return allowNullOrigin;
    }

    /**
     * Determines if cookies are supported for CORS requests.
     *
     * By default cookies are not included in CORS requests but if isCredentialsAllowed returns
     * true cookies will be added to CORS requests. Setting this value to true will set the
     * CORS 'Access-Control-Allow-Credentials' response header to true.
     *
     * Please note that cookie support needs to be enabled on the client side as well.
     * The client needs to opt-in to send cookies by calling:
     * <pre>
     * xhr.withCredentials = true;
     * </pre>
     * The default value for 'withCredentials' is false in which case no cookies are sent.
     * Setting this to true will include cookies in cross origin requests.
     *
     * @return {@code true} if cookies are supported.
     */
    public boolean isCredentialsAllowed() {
        return allowCredentials;
    }

    /**
     * Gets the maxAge setting.
     *
     * When making a preflight request the client has to perform two requests, which can be
     * inefficient. This setting will set the CORS 'Access-Control-Max-Age' response header
     * and enables the caching of the preflight response for the specified time. During this
     * time no preflight request will be made.
     *
     * @return {@code long} the time in seconds that a preflight request may be cached.
     */
    public long maxAge() {
        return maxAge;
    }

    /**
     * Returns the allowed set of Request Methods. The Http methods that should be returned in the
     * CORS 'Access-Control-Request-Method' response header.
     *
     * @return {@code Set} of {@link HttpMethod}s that represent the allowed Request Methods.
     */
    public Set<HttpMethod> allowedRequestMethods() {
        return Collections.unmodifiableSet(allowedRequestMethods);
    }

    /**
     * Returns the allowed set of Request Headers.
     *
     * The header names returned from this method will be used to set the CORS
     * 'Access-Control-Allow-Headers' response header.
     *
     * @return {@code Set} of strings that represent the allowed Request Headers.
     */
    public Set<String> allowedRequestHeaders() {
        return Collections.unmodifiableSet(allowedRequestHeaders);
    }

    /**
     * Returns HTTP response headers that should be added to a CORS preflight response.
     *
     * @return {@link HttpHeaders} the HTTP response headers to be added.
     */
    public HttpHeaders preflightResponseHeaders() {
        if (preflightHeaders.isEmpty()) {
            return EmptyHttpHeaders.INSTANCE;
        }
        final HttpHeaders preflightHeaders = new DefaultHttpHeaders();
        for (Map.Entry<CharSequence, Callable<?>> entry : this.preflightHeaders.entrySet()) {
            // Each configured header value is produced by a Callable so that dynamic
            // values (e.g. the 'Date' header) are generated per response.
            final Object value = getValue(entry.getValue());
            if (value instanceof Iterable) {
                preflightHeaders.add(entry.getKey().toString(), (Iterable<?>) value);
            } else {
                preflightHeaders.add(entry.getKey().toString(), value);
            }
        }
        return preflightHeaders;
    }

    /**
     * Determines whether a CORS request should be rejected, when invalid, before being
     * processed further.
     *
     * CORS headers are set after a request is processed. This may not always be desired
     * and this setting will check that the Origin is valid and if it is not valid no
     * further processing will take place, and an error will be returned to the calling client.
     *
     * @return {@code true} if a CORS request should short-circuit upon receiving an invalid Origin header.
     */
    public boolean isShortCircuit() {
        return shortCircuit;
    }

    // Invokes a header-value generator, wrapping any failure in an unchecked exception
    // since header generation is not expected to fail at response time.
    private static <T> T getValue(final Callable<T> callable) {
        try {
            return callable.call();
        } catch (final Exception e) {
            throw new IllegalStateException("Could not generate value for callable [" + callable + ']', e);
        }
    }

    @Override
    public String toString() {
        return "CorsConfig[enabled=" + enabled +
                ", origins=" + origins +
                ", anyOrigin=" + anyOrigin +
                ", isCredentialsAllowed=" + allowCredentials +
                ", maxAge=" + maxAge +
                ", allowedRequestMethods=" + allowedRequestMethods +
                ", allowedRequestHeaders=" + allowedRequestHeaders +
                ", preflightHeaders=" + preflightHeaders + ']';
    }

}
diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsConfigBuilder.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsConfigBuilder.java
new file mode 100644
index 00000000000..4989cd35f7b
--- /dev/null
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsConfigBuilder.java
@@ -0,0 +1,357 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
+ */ + +package org.elasticsearch.http.netty4.cors; + +import io.netty.handler.codec.http.HttpMethod; + +import java.util.Arrays; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.Callable; +import java.util.regex.Pattern; + +/** + * Builder used to configure and build a {@link Netty4CorsConfig} instance. + * + * This class was lifted from the Netty project: + * https://github.com/netty/netty + */ +public final class Netty4CorsConfigBuilder { + + /** + * Creates a Builder instance with it's origin set to '*'. + * + * @return Builder to support method chaining. + */ + public static Netty4CorsConfigBuilder forAnyOrigin() { + return new Netty4CorsConfigBuilder(); + } + + /** + * Creates a {@link Netty4CorsConfigBuilder} instance with the specified origin. + * + * @return {@link Netty4CorsConfigBuilder} to support method chaining. + */ + public static Netty4CorsConfigBuilder forOrigin(final String origin) { + if ("*".equals(origin)) { + return new Netty4CorsConfigBuilder(); + } + return new Netty4CorsConfigBuilder(origin); + } + + + /** + * Create a {@link Netty4CorsConfigBuilder} instance with the specified pattern origin. + * + * @param pattern the regular expression pattern to match incoming origins on. + * @return {@link Netty4CorsConfigBuilder} with the configured origin pattern. + */ + public static Netty4CorsConfigBuilder forPattern(final Pattern pattern) { + if (pattern == null) { + throw new IllegalArgumentException("CORS pattern cannot be null"); + } + return new Netty4CorsConfigBuilder(pattern); + } + + /** + * Creates a {@link Netty4CorsConfigBuilder} instance with the specified origins. + * + * @return {@link Netty4CorsConfigBuilder} to support method chaining. + */ + public static Netty4CorsConfigBuilder forOrigins(final String... 
origins) { + return new Netty4CorsConfigBuilder(origins); + } + + Optional> origins; + Optional pattern; + final boolean anyOrigin; + boolean allowNullOrigin; + boolean enabled = true; + boolean allowCredentials; + long maxAge; + final Set requestMethods = new HashSet<>(); + final Set requestHeaders = new HashSet<>(); + final Map> preflightHeaders = new HashMap<>(); + private boolean noPreflightHeaders; + boolean shortCircuit; + + /** + * Creates a new Builder instance with the origin passed in. + * + * @param origins the origin to be used for this builder. + */ + Netty4CorsConfigBuilder(final String... origins) { + this.origins = Optional.of(new LinkedHashSet<>(Arrays.asList(origins))); + pattern = Optional.empty(); + anyOrigin = false; + } + + /** + * Creates a new Builder instance allowing any origin, "*" which is the + * wildcard origin. + * + */ + Netty4CorsConfigBuilder() { + anyOrigin = true; + origins = Optional.empty(); + pattern = Optional.empty(); + } + + /** + * Creates a new Builder instance allowing any origin that matches the pattern. + * + * @param pattern the pattern to match against for incoming origins. + */ + Netty4CorsConfigBuilder(final Pattern pattern) { + this.pattern = Optional.of(pattern); + origins = Optional.empty(); + anyOrigin = false; + } + + /** + * Web browsers may set the 'Origin' request header to 'null' if a resource is loaded + * from the local file system. Calling this method will enable a successful CORS response + * with a wildcard for the CORS response header 'Access-Control-Allow-Origin'. + * + * @return {@link Netty4CorsConfigBuilder} to support method chaining. + */ + Netty4CorsConfigBuilder allowNullOrigin() { + allowNullOrigin = true; + return this; + } + + /** + * Disables CORS support. + * + * @return {@link Netty4CorsConfigBuilder} to support method chaining. 
+ */ + public Netty4CorsConfigBuilder disable() { + enabled = false; + return this; + } + + /** + * By default cookies are not included in CORS requests, but this method will enable cookies to + * be added to CORS requests. Calling this method will set the CORS 'Access-Control-Allow-Credentials' + * response header to true. + * + * Please note, that cookie support needs to be enabled on the client side as well. + * The client needs to opt-in to send cookies by calling: + *
+     * xhr.withCredentials = true;
+     * 
+ * The default value for 'withCredentials' is false in which case no cookies are sent. + * Setting this to true will included cookies in cross origin requests. + * + * @return {@link Netty4CorsConfigBuilder} to support method chaining. + */ + public Netty4CorsConfigBuilder allowCredentials() { + allowCredentials = true; + return this; + } + + /** + * When making a preflight request the client has to perform two request with can be inefficient. + * This setting will set the CORS 'Access-Control-Max-Age' response header and enables the + * caching of the preflight response for the specified time. During this time no preflight + * request will be made. + * + * @param max the maximum time, in seconds, that the preflight response may be cached. + * @return {@link Netty4CorsConfigBuilder} to support method chaining. + */ + public Netty4CorsConfigBuilder maxAge(final long max) { + maxAge = max; + return this; + } + + /** + * Specifies the allowed set of HTTP Request Methods that should be returned in the + * CORS 'Access-Control-Request-Method' response header. + * + * @param methods the {@link HttpMethod}s that should be allowed. + * @return {@link Netty4CorsConfigBuilder} to support method chaining. + */ + public Netty4CorsConfigBuilder allowedRequestMethods(final HttpMethod... methods) { + requestMethods.addAll(Arrays.asList(methods)); + return this; + } + + /** + * Specifies the if headers that should be returned in the CORS 'Access-Control-Allow-Headers' + * response header. + * + * If a client specifies headers on the request, for example by calling: + *
+     * xhr.setRequestHeader('My-Custom-Header', "SomeValue");
+     * 
+ * the server will receive the above header name in the 'Access-Control-Request-Headers' of the + * preflight request. The server will then decide if it allows this header to be sent for the + * real request (remember that a preflight is not the real request but a request asking the server + * if it allow a request). + * + * @param headers the headers to be added to the preflight 'Access-Control-Allow-Headers' response header. + * @return {@link Netty4CorsConfigBuilder} to support method chaining. + */ + public Netty4CorsConfigBuilder allowedRequestHeaders(final String... headers) { + requestHeaders.addAll(Arrays.asList(headers)); + return this; + } + + /** + * Returns HTTP response headers that should be added to a CORS preflight response. + * + * An intermediary like a load balancer might require that a CORS preflight request + * have certain headers set. This enables such headers to be added. + * + * @param name the name of the HTTP header. + * @param values the values for the HTTP header. + * @return {@link Netty4CorsConfigBuilder} to support method chaining. + */ + public Netty4CorsConfigBuilder preflightResponseHeader(final CharSequence name, final Object... values) { + if (values.length == 1) { + preflightHeaders.put(name, new ConstantValueGenerator(values[0])); + } else { + preflightResponseHeader(name, Arrays.asList(values)); + } + return this; + } + + /** + * Returns HTTP response headers that should be added to a CORS preflight response. + * + * An intermediary like a load balancer might require that a CORS preflight request + * have certain headers set. This enables such headers to be added. + * + * @param name the name of the HTTP header. + * @param value the values for the HTTP header. + * @param the type of values that the Iterable contains. + * @return {@link Netty4CorsConfigBuilder} to support method chaining. 
+ */ + public Netty4CorsConfigBuilder preflightResponseHeader(final CharSequence name, final Iterable value) { + preflightHeaders.put(name, new ConstantValueGenerator(value)); + return this; + } + + /** + * Returns HTTP response headers that should be added to a CORS preflight response. + * + * An intermediary like a load balancer might require that a CORS preflight request + * have certain headers set. This enables such headers to be added. + * + * Some values must be dynamically created when the HTTP response is created, for + * example the 'Date' response header. This can be accomplished by using a Callable + * which will have its 'call' method invoked when the HTTP response is created. + * + * @param name the name of the HTTP header. + * @param valueGenerator a Callable which will be invoked at HTTP response creation. + * @param the type of the value that the Callable can return. + * @return {@link Netty4CorsConfigBuilder} to support method chaining. + */ + public Netty4CorsConfigBuilder preflightResponseHeader(final CharSequence name, final Callable valueGenerator) { + preflightHeaders.put(name, valueGenerator); + return this; + } + + /** + * Specifies that no preflight response headers should be added to a preflight response. + * + * @return {@link Netty4CorsConfigBuilder} to support method chaining. + */ + public Netty4CorsConfigBuilder noPreflightResponseHeaders() { + noPreflightHeaders = true; + return this; + } + + /** + * Specifies that a CORS request should be rejected if it's invalid before being + * further processing. + * + * CORS headers are set after a request is processed. This may not always be desired + * and this setting will check that the Origin is valid and if it is not valid no + * further processing will take place, and a error will be returned to the calling client. + * + * @return {@link Netty4CorsConfigBuilder} to support method chaining. 
+ */ + public Netty4CorsConfigBuilder shortCircuit() { + shortCircuit = true; + return this; + } + + /** + * Builds a {@link Netty4CorsConfig} with settings specified by previous method calls. + * + * @return {@link Netty4CorsConfig} the configured CorsConfig instance. + */ + public Netty4CorsConfig build() { + if (preflightHeaders.isEmpty() && !noPreflightHeaders) { + preflightHeaders.put("date", DateValueGenerator.INSTANCE); + preflightHeaders.put("content-length", new ConstantValueGenerator("0")); + } + return new Netty4CorsConfig(this); + } + + /** + * This class is used for preflight HTTP response values that do not need to be + * generated, but instead the value is "static" in that the same value will be returned + * for each call. + */ + private static final class ConstantValueGenerator implements Callable<Object> { + + private final Object value; + + /** + * Sole constructor. + * + * @param value the value that will be returned when the call method is invoked. + */ + private ConstantValueGenerator(final Object value) { + if (value == null) { + throw new IllegalArgumentException("value must not be null"); + } + this.value = value; + } + + @Override + public Object call() { + return value; + } + } + + /** + * This callable is used for the DATE preflight HTTP response HTTP header. + * Its value must be generated when the response is generated, hence will be + * different for every call. 
+ */ + private static final class DateValueGenerator implements Callable { + + static final DateValueGenerator INSTANCE = new DateValueGenerator(); + + @Override + public Date call() throws Exception { + return new Date(); + } + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsHandler.java new file mode 100644 index 00000000000..779eb4fe2e4 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/cors/Netty4CorsHandler.java @@ -0,0 +1,235 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.http.netty4.cors; + +import io.netty.channel.ChannelDuplexHandler; +import io.netty.channel.ChannelFutureListener; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.http.DefaultFullHttpResponse; +import io.netty.handler.codec.http.HttpHeaderNames; +import io.netty.handler.codec.http.HttpHeaders; +import io.netty.handler.codec.http.HttpMethod; +import io.netty.handler.codec.http.HttpRequest; +import io.netty.handler.codec.http.HttpResponse; +import io.netty.handler.codec.http.HttpResponseStatus; +import org.elasticsearch.common.Strings; + +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +/** + * Handles Cross Origin Resource Sharing (CORS) requests. + *

+ * This handler can be configured using a {@link Netty4CorsConfig}, please + * refer to this class for details about the configuration options available. + * + * This code was borrowed from Netty 4 and refactored to work for Elasticsearch's Netty 3 setup. + */ +public class Netty4CorsHandler extends ChannelDuplexHandler { + + public static final String ANY_ORIGIN = "*"; + private static Pattern SCHEME_PATTERN = Pattern.compile("^https?://"); + + private final Netty4CorsConfig config; + private HttpRequest request; + + /** + * Creates a new instance with the specified {@link Netty4CorsConfig}. + */ + public Netty4CorsHandler(final Netty4CorsConfig config) { + if (config == null) { + throw new NullPointerException(); + } + this.config = config; + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + if (config.isCorsSupportEnabled() && msg instanceof HttpRequest) { + request = (HttpRequest) msg; + if (isPreflightRequest(request)) { + handlePreflight(ctx, request); + return; + } + if (config.isShortCircuit() && !validateOrigin()) { + forbidden(ctx, request); + return; + } + } + ctx.fireChannelRead(msg); + } + + public static void setCorsResponseHeaders(HttpRequest request, HttpResponse resp, Netty4CorsConfig config) { + if (!config.isCorsSupportEnabled()) { + return; + } + String originHeader = request.headers().get(HttpHeaderNames.ORIGIN); + if (!Strings.isNullOrEmpty(originHeader)) { + final String originHeaderVal; + if (config.isAnyOriginSupported()) { + originHeaderVal = ANY_ORIGIN; + } else if (config.isOriginAllowed(originHeader) || isSameOrigin(originHeader, request.headers().get(HttpHeaderNames.HOST))) { + originHeaderVal = originHeader; + } else { + originHeaderVal = null; + } + if (originHeaderVal != null) { + resp.headers().add(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN, originHeaderVal); + } + } + if (config.isCredentialsAllowed()) { + resp.headers().add(HttpHeaderNames.ACCESS_CONTROL_ALLOW_CREDENTIALS, 
"true"); + } + } + + private void handlePreflight(final ChannelHandlerContext ctx, final HttpRequest request) { + final HttpResponse response = new DefaultFullHttpResponse(request.protocolVersion(), HttpResponseStatus.OK, true, true); + if (setOrigin(response)) { + setAllowMethods(response); + setAllowHeaders(response); + setAllowCredentials(response); + setMaxAge(response); + setPreflightHeaders(response); + ctx.writeAndFlush(response).addListener(ChannelFutureListener.CLOSE); + } else { + forbidden(ctx, request); + } + } + + private static void forbidden(final ChannelHandlerContext ctx, final HttpRequest request) { + ctx.writeAndFlush(new DefaultFullHttpResponse(request.protocolVersion(), HttpResponseStatus.FORBIDDEN)) + .addListener(ChannelFutureListener.CLOSE); + } + + private static boolean isSameOrigin(final String origin, final String host) { + if (Strings.isNullOrEmpty(host) == false) { + // strip protocol from origin + final String originDomain = SCHEME_PATTERN.matcher(origin).replaceFirst(""); + if (host.equals(originDomain)) { + return true; + } + } + return false; + } + + /** + * This is a non CORS specification feature which enables the setting of preflight + * response headers that might be required by intermediaries. + * + * @param response the HttpResponse to which the preflight response headers should be added. 
+ */ + private void setPreflightHeaders(final HttpResponse response) { + response.headers().add(config.preflightResponseHeaders()); + } + + private boolean setOrigin(final HttpResponse response) { + final String origin = request.headers().get(HttpHeaderNames.ORIGIN); + if (!Strings.isNullOrEmpty(origin)) { + if ("null".equals(origin) && config.isNullOriginAllowed()) { + setAnyOrigin(response); + return true; + } + + if (config.isAnyOriginSupported()) { + if (config.isCredentialsAllowed()) { + echoRequestOrigin(response); + setVaryHeader(response); + } else { + setAnyOrigin(response); + } + return true; + } + if (config.isOriginAllowed(origin)) { + setOrigin(response, origin); + setVaryHeader(response); + return true; + } + } + return false; + } + + private boolean validateOrigin() { + if (config.isAnyOriginSupported()) { + return true; + } + + final String origin = request.headers().get(HttpHeaderNames.ORIGIN); + if (Strings.isNullOrEmpty(origin)) { + // Not a CORS request so we cannot validate it. It may be a non CORS request. 
+ return true; + } + + if ("null".equals(origin) && config.isNullOriginAllowed()) { + return true; + } + + // if the origin is the same as the host of the request, then allow + if (isSameOrigin(origin, request.headers().get(HttpHeaderNames.HOST))) { + return true; + } + + return config.isOriginAllowed(origin); + } + + private void echoRequestOrigin(final HttpResponse response) { + setOrigin(response, request.headers().get(HttpHeaderNames.ORIGIN)); + } + + private static void setVaryHeader(final HttpResponse response) { + response.headers().set(HttpHeaderNames.VARY, HttpHeaderNames.ORIGIN); + } + + private static void setAnyOrigin(final HttpResponse response) { + setOrigin(response, ANY_ORIGIN); + } + + private static void setOrigin(final HttpResponse response, final String origin) { + response.headers().set(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN, origin); + } + + private void setAllowCredentials(final HttpResponse response) { + if (config.isCredentialsAllowed() + && !response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN).equals(ANY_ORIGIN)) { + response.headers().set(HttpHeaderNames.ACCESS_CONTROL_ALLOW_CREDENTIALS, "true"); + } + } + + private static boolean isPreflightRequest(final HttpRequest request) { + final HttpHeaders headers = request.headers(); + return request.method().equals(HttpMethod.OPTIONS) && + headers.contains(HttpHeaderNames.ORIGIN) && + headers.contains(HttpHeaderNames.ACCESS_CONTROL_REQUEST_METHOD); + } + + private void setAllowMethods(final HttpResponse response) { + response.headers().set(HttpHeaderNames.ACCESS_CONTROL_ALLOW_METHODS, config.allowedRequestMethods().stream() + .map(m -> m.name().trim()) + .collect(Collectors.toList())); + } + + private void setAllowHeaders(final HttpResponse response) { + response.headers().set(HttpHeaderNames.ACCESS_CONTROL_ALLOW_HEADERS, config.allowedRequestHeaders()); + } + + private void setMaxAge(final HttpResponse response) { + 
response.headers().set(HttpHeaderNames.ACCESS_CONTROL_MAX_AGE, config.maxAge()); + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipelinedRequest.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipelinedRequest.java new file mode 100644 index 00000000000..792397a3c3d --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipelinedRequest.java @@ -0,0 +1,91 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.http.netty4.pipelining; + +import io.netty.channel.ChannelPromise; +import io.netty.handler.codec.http.FullHttpRequest; +import io.netty.handler.codec.http.FullHttpResponse; +import io.netty.handler.codec.http.HttpResponse; +import io.netty.handler.codec.http.LastHttpContent; +import io.netty.util.ReferenceCounted; + +/** + * Permits downstream channel events to be ordered and signalled as to whether more are to come for + * a given sequence. 
+ */ +public class HttpPipelinedRequest implements ReferenceCounted { + + private final LastHttpContent last; + private final int sequence; + + + HttpPipelinedRequest(final LastHttpContent last, final int sequence) { + this.last = last; + this.sequence = sequence; + } + + public LastHttpContent last() { + return last; + } + + public HttpPipelinedResponse createHttpResponse(final FullHttpResponse response, final ChannelPromise promise) { + return new HttpPipelinedResponse(response, promise, sequence); + } + + @Override + public int refCnt() { + return last.refCnt(); + } + + @Override + public ReferenceCounted retain() { + last.retain(); + return this; + } + + @Override + public ReferenceCounted retain(int increment) { + last.retain(increment); + return this; + } + + @Override + public ReferenceCounted touch() { + last.touch(); + return this; + } + + @Override + public ReferenceCounted touch(Object hint) { + last.touch(hint); + return this; + } + + @Override + public boolean release() { + return last.release(); + } + + @Override + public boolean release(int decrement) { + return last.release(decrement); + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipelinedResponse.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipelinedResponse.java new file mode 100644 index 00000000000..21659d5fbdf --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipelinedResponse.java @@ -0,0 +1,95 @@ +package org.elasticsearch.http.netty4.pipelining; + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import io.netty.channel.ChannelPromise; +import io.netty.handler.codec.http.FullHttpResponse; +import io.netty.handler.codec.http.HttpResponse; +import io.netty.util.ReferenceCounted; + +class HttpPipelinedResponse implements Comparable, ReferenceCounted { + + private final FullHttpResponse response; + private final ChannelPromise promise; + private final int sequence; + + HttpPipelinedResponse(FullHttpResponse response, ChannelPromise promise, int sequence) { + this.response = response; + this.promise = promise; + this.sequence = sequence; + } + + public FullHttpResponse response() { + return response; + } + + public ChannelPromise promise() { + return promise; + } + + public int sequence() { + return sequence; + } + + @Override + public int compareTo(HttpPipelinedResponse o) { + return Integer.compare(sequence, o.sequence); + } + + @Override + public int refCnt() { + return response.refCnt(); + } + + @Override + public ReferenceCounted retain() { + response.retain(); + return this; + } + + @Override + public ReferenceCounted retain(int increment) { + response.retain(increment); + return this; + } + + @Override + public ReferenceCounted touch() { + response.touch(); + return this; + } + + @Override + public ReferenceCounted touch(Object hint) { + response.touch(hint); + return this; + } + + @Override + public boolean release() { + return response.release(); + } + + @Override + public boolean release(int decrement) { + return response.release(decrement); + } + +} diff --git 
a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java new file mode 100644 index 00000000000..b96b7f5b322 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java @@ -0,0 +1,108 @@ + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.http.netty4.pipelining; + +import io.netty.channel.ChannelDuplexHandler; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelPromise; +import io.netty.handler.codec.http.LastHttpContent; +import io.netty.util.ReferenceCountUtil; +import org.elasticsearch.action.termvectors.TermVectorsFilter; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.transport.netty4.Netty4Utils; + +import java.util.Collections; +import java.util.PriorityQueue; +import java.util.Queue; + +/** + * Implements HTTP pipelining ordering, ensuring that responses are completely served in the same order as their + * corresponding requests. 
NOTE: A side effect of using this handler is that upstream HttpRequest objects will + * cause the original message event to be effectively transformed into an OrderedUpstreamMessageEvent. Conversely + * OrderedDownstreamChannelEvent objects are expected to be received for the correlating response objects. + */ +public class HttpPipeliningHandler extends ChannelDuplexHandler { + + private static final int INITIAL_EVENTS_HELD = 3; + + private final int maxEventsHeld; + + private int readSequence; + private int writeSequence; + + private final Queue holdingQueue; + + /** + * @param maxEventsHeld the maximum number of channel events that will be retained prior to aborting the channel + * connection. This is required as events cannot queue up indefinitely; we would run out of + * memory if this was the case. + */ + public HttpPipeliningHandler(final int maxEventsHeld) { + this.maxEventsHeld = maxEventsHeld; + this.holdingQueue = new PriorityQueue<>(INITIAL_EVENTS_HELD); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + if (msg instanceof LastHttpContent) { + ctx.fireChannelRead(new HttpPipelinedRequest(((LastHttpContent) msg).retain(), readSequence++)); + } else { + ctx.fireChannelRead(msg); + } + } + + @Override + public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception { + if (msg instanceof HttpPipelinedResponse) { + boolean channelShouldClose = false; + + synchronized (holdingQueue) { + if (holdingQueue.size() < maxEventsHeld) { + holdingQueue.add((HttpPipelinedResponse) msg); + + while (!holdingQueue.isEmpty()) { + final HttpPipelinedResponse response = holdingQueue.peek(); + if (response.sequence() != writeSequence) { + break; + } + holdingQueue.remove(); + ctx.write(response.response(), response.promise()); + writeSequence++; + } + } else { + channelShouldClose = true; + } + } + + if (channelShouldClose) { + try { + 
Netty4Utils.closeChannels(Collections.singletonList(ctx.channel())); + } finally { + ((HttpPipelinedResponse) msg).release(); + promise.setSuccess(); + } + } + } else { + ctx.write(msg, promise); + } + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/Netty4Plugin.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/Netty4Plugin.java new file mode 100644 index 00000000000..1ec2852fd1d --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/Netty4Plugin.java @@ -0,0 +1,94 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.transport; + +import org.elasticsearch.SpecialPermission; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.http.netty4.Netty4HttpServerTransport; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.transport.netty4.Netty4Transport; + +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.Arrays; +import java.util.List; + +public class Netty4Plugin extends Plugin { + + public static final String NETTY_TRANSPORT_NAME = "netty4"; + public static final String NETTY_HTTP_TRANSPORT_NAME = "netty4"; + + public Netty4Plugin(Settings settings) { + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + sm.checkPermission(new SpecialPermission()); + } + AccessController.doPrivileged((PrivilegedAction) () -> { + try { + Class.forName("io.netty.channel.nio.NioEventLoop"); + } catch (ClassNotFoundException e) { + throw new AssertionError(e); // we don't do anything with this + } + return null; + }); + /* + * Asserts that sun.nio.ch.bugLevel has been set to a non-null value. This assertion will fail if the corresponding code + * is not executed in a doPrivileged block. This can be disabled via `netty.assert.buglevel` setting which isn't registered + * by default but test can do so if they depend on the jar instead of the module. + */ + //TODO Once we have no jar level dependency we can get rid of this. 
+ if (settings.getAsBoolean("netty.assert.buglevel", true)) { + assert System.getProperty("sun.nio.ch.bugLevel") != null : + "sun.nio.ch.bugLevel is null somebody pulls in SelectorUtil without doing stuff in a doPrivileged block?"; + } + } + + @Override + public List> getSettings() { + return Arrays.asList( + Netty4HttpServerTransport.SETTING_HTTP_NETTY_MAX_CUMULATION_BUFFER_CAPACITY, + Netty4HttpServerTransport.SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS, + Netty4HttpServerTransport.SETTING_HTTP_WORKER_COUNT, + Netty4HttpServerTransport.SETTING_HTTP_TCP_NO_DELAY, + Netty4HttpServerTransport.SETTING_HTTP_TCP_KEEP_ALIVE, + Netty4HttpServerTransport.SETTING_HTTP_TCP_BLOCKING_SERVER, + Netty4HttpServerTransport.SETTING_HTTP_TCP_REUSE_ADDRESS, + Netty4HttpServerTransport.SETTING_HTTP_TCP_SEND_BUFFER_SIZE, + Netty4HttpServerTransport.SETTING_HTTP_TCP_RECEIVE_BUFFER_SIZE, + Netty4Transport.WORKER_COUNT, + Netty4Transport.NETTY_MAX_CUMULATION_BUFFER_CAPACITY, + Netty4Transport.NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS, + Netty4Transport.NETTY_RECEIVE_PREDICTOR_SIZE, + Netty4Transport.NETTY_RECEIVE_PREDICTOR_MIN, + Netty4Transport.NETTY_RECEIVE_PREDICTOR_MAX, + Netty4Transport.NETTY_BOSS_COUNT + ); + } + + public void onModule(NetworkModule networkModule) { + if (networkModule.canRegisterHttpExtensions()) { + networkModule.registerHttpTransport(NETTY_HTTP_TRANSPORT_NAME, Netty4HttpServerTransport.class); + } + networkModule.registerTransport(NETTY_TRANSPORT_NAME, Netty4Transport.class); + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/ByteBufBytesReference.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/ByteBufBytesReference.java new file mode 100644 index 00000000000..3b4aba10281 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/ByteBufBytesReference.java @@ -0,0 +1,92 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license 
agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.transport.netty4; + +import io.netty.buffer.ByteBuf; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; + +import java.io.IOException; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; + +final class ByteBufBytesReference extends BytesReference { + + private final ByteBuf buffer; + private final int length; + private final int offset; + + ByteBufBytesReference(ByteBuf buffer, int length) { + this.buffer = buffer; + this.length = length; + this.offset = buffer.readerIndex(); + assert length <= buffer.readableBytes() : "length[" + length +"] > " + buffer.readableBytes(); + } + + @Override + public byte get(int index) { + return buffer.getByte(offset + index); + } + + @Override + public int length() { + return length; + } + + @Override + public BytesReference slice(int from, int length) { + return new ByteBufBytesReference(buffer.slice(offset + from, length), length); + } + + @Override + public StreamInput streamInput() { + return new ByteBufStreamInput(buffer.duplicate(), length); + } + + @Override + public void writeTo(OutputStream os) throws IOException { + buffer.getBytes(offset, os, length); + } + + 
ByteBuf toByteBuf() { + return buffer.duplicate(); + } + + @Override + public String utf8ToString() { + return buffer.toString(offset, length, StandardCharsets.UTF_8); + } + + @Override + public BytesRef toBytesRef() { + if (buffer.hasArray()) { + return new BytesRef(buffer.array(), buffer.arrayOffset() + offset, length); + } + final byte[] copy = new byte[length]; + buffer.getBytes(offset, copy); + return new BytesRef(copy); + } + + @Override + public long ramBytesUsed() { + return buffer.capacity(); + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/ByteBufStreamInput.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/ByteBufStreamInput.java new file mode 100644 index 00000000000..2219ce31ff6 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/ByteBufStreamInput.java @@ -0,0 +1,141 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.transport.netty4; + +import io.netty.buffer.ByteBuf; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; + +import java.io.IOException; + +/** + * A Netty {@link io.netty.buffer.ByteBuf} based {@link org.elasticsearch.common.io.stream.StreamInput}. + */ +class ByteBufStreamInput extends StreamInput { + + private final ByteBuf buffer; + private final int startIndex; + private final int endIndex; + + public ByteBufStreamInput(ByteBuf buffer, int length) { + if (length > buffer.readableBytes()) { + throw new IndexOutOfBoundsException(); + } + this.buffer = buffer; + startIndex = buffer.readerIndex(); + endIndex = startIndex + length; + buffer.markReaderIndex(); + } + + @Override + public BytesReference readBytesReference(int length) throws IOException { + BytesReference ref = Netty4Utils.toBytesReference(buffer.slice(buffer.readerIndex(), length)); + buffer.skipBytes(length); + return ref; + } + + @Override + public BytesRef readBytesRef(int length) throws IOException { + if (!buffer.hasArray()) { + return super.readBytesRef(length); + } + BytesRef bytesRef = new BytesRef(buffer.array(), buffer.arrayOffset() + buffer.readerIndex(), length); + buffer.skipBytes(length); + return bytesRef; + } + + @Override + public int available() throws IOException { + return endIndex - buffer.readerIndex(); + } + + @Override + public void mark(int readlimit) { + buffer.markReaderIndex(); + } + + @Override + public boolean markSupported() { + return true; + } + + @Override + public int read() throws IOException { + if (available() == 0) { + return -1; + } + return buffer.readByte() & 0xff; + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + if (len == 0) { + return 0; + } + int available = available(); + if (available == 0) { + return -1; + } + + len = Math.min(available, len); + buffer.readBytes(b, off, len); + return 
len; + } + + @Override + public void reset() throws IOException { + buffer.resetReaderIndex(); + } + + @Override + public long skip(long n) throws IOException { + if (n > Integer.MAX_VALUE) { + return skipBytes(Integer.MAX_VALUE); + } else { + return skipBytes((int) n); + } + } + + public int skipBytes(int n) throws IOException { + int nBytes = Math.min(available(), n); + buffer.skipBytes(nBytes); + return nBytes; + } + + + @Override + public byte readByte() throws IOException { + return buffer.readByte(); + } + + @Override + public void readBytes(byte[] b, int offset, int len) throws IOException { + int read = read(b, offset, len); + if (read < len) { + throw new IndexOutOfBoundsException(); + } + } + + @Override + public void close() throws IOException { + // nothing to do here + } +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java new file mode 100644 index 00000000000..61555294018 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java @@ -0,0 +1,187 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.transport.netty4; + +import io.netty.util.internal.logging.AbstractInternalLogger; +import org.elasticsearch.common.SuppressLoggerChecks; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; + +@SuppressLoggerChecks(reason = "safely delegates to logger") +class Netty4InternalESLogger extends AbstractInternalLogger { + + private final ESLogger logger; + + Netty4InternalESLogger(final String name) { + super(name); + this.logger = Loggers.getLogger(name); + } + + @Override + public boolean isTraceEnabled() { + return logger.isTraceEnabled(); + } + + @Override + public void trace(String msg) { + logger.trace(msg); + } + + @Override + public void trace(String format, Object arg) { + logger.trace(format, arg); + } + + @Override + public void trace(String format, Object argA, Object argB) { + logger.trace(format, argA, argB); + } + + @Override + public void trace(String format, Object... arguments) { + logger.trace(format, arguments); + } + + @Override + public void trace(String msg, Throwable t) { + logger.trace(msg, t); + } + + @Override + public boolean isDebugEnabled() { + return logger.isDebugEnabled(); + } + + @Override + public void debug(String msg) { + logger.debug(msg); + } + + @Override + public void debug(String format, Object arg) { + logger.debug(format, arg); + } + + @Override + public void debug(String format, Object argA, Object argB) { + logger.debug(format, argA, argB); + } + + @Override + public void debug(String format, Object... 
arguments) { + logger.debug(format, arguments); + } + + @Override + public void debug(String msg, Throwable t) { + logger.debug(msg, t); + } + + @Override + public boolean isInfoEnabled() { + return logger.isInfoEnabled(); + } + + @Override + public void info(String msg) { + logger.info(msg); + } + + @Override + public void info(String format, Object arg) { + logger.info(format, arg); + } + + @Override + public void info(String format, Object argA, Object argB) { + logger.info(format, argA, argB); + } + + @Override + public void info(String format, Object... arguments) { + logger.info(format, arguments); + } + + @Override + public void info(String msg, Throwable t) { + logger.info(msg, t); + } + + @Override + public boolean isWarnEnabled() { + return logger.isWarnEnabled(); + } + + @Override + public void warn(String msg) { + logger.warn(msg); + } + + @Override + public void warn(String format, Object arg) { + logger.warn(format, arg); + } + + @Override + public void warn(String format, Object... arguments) { + logger.warn(format, arguments); + } + + @Override + public void warn(String format, Object argA, Object argB) { + logger.warn(format, argA, argB); + } + + @Override + public void warn(String msg, Throwable t) { + logger.warn(msg, t); + } + + @Override + public boolean isErrorEnabled() { + return logger.isErrorEnabled(); + } + + @Override + public void error(String msg) { + logger.error(msg); + } + + @Override + public void error(String format, Object arg) { + logger.error(format, arg); + } + + @Override + public void error(String format, Object argA, Object argB) { + logger.error(format, argA, argB); + } + + @Override + public void error(String format, Object... 
arguments) { + logger.error(format, arguments); + } + + @Override + public void error(String msg, Throwable t) { + logger.error(msg, t); + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageChannelHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageChannelHandler.java new file mode 100644 index 00000000000..cddbdac3e76 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageChannelHandler.java @@ -0,0 +1,86 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.transport.netty4; + +import io.netty.buffer.ByteBuf; +import io.netty.channel.ChannelDuplexHandler; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelPromise; +import io.netty.util.ReferenceCountUtil; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.transport.TcpHeader; +import org.elasticsearch.transport.TransportServiceAdapter; +import org.elasticsearch.transport.Transports; + +import java.net.InetSocketAddress; + +/** + * A handler (must be the last one!) 
that does size based frame decoding and forwards the actual message + * to the relevant action. + */ +final class Netty4MessageChannelHandler extends ChannelDuplexHandler { + + private final TransportServiceAdapter transportServiceAdapter; + private final Netty4Transport transport; + private final String profileName; + + Netty4MessageChannelHandler(Netty4Transport transport, String profileName) { + this.transportServiceAdapter = transport.transportServiceAdapter(); + this.transport = transport; + this.profileName = profileName; + } + + @Override + public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception { + if (msg instanceof ByteBuf && transportServiceAdapter != null) { + // record the number of bytes send on the channel + promise.addListener(f -> transportServiceAdapter.addBytesSent(((ByteBuf) msg).readableBytes())); + } + ctx.write(msg, promise); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + Transports.assertTransportThread(); + if (!(msg instanceof ByteBuf)) { + ctx.fireChannelRead(msg); + return; + } + final ByteBuf buffer = (ByteBuf) msg; + final int remainingMessageSize = buffer.getInt(buffer.readerIndex() - TcpHeader.MESSAGE_LENGTH_SIZE); + final int expectedReaderIndex = buffer.readerIndex() + remainingMessageSize; + InetSocketAddress remoteAddress = (InetSocketAddress) ctx.channel().remoteAddress(); + try { + // netty always copies a buffer, either in NioWorker in its read handler, where it copies to a fresh + // buffer, or in the cumulation buffer, which is cleaned each time so it could be bigger than the actual size + BytesReference reference = Netty4Utils.toBytesReference(buffer, remainingMessageSize); + transport.messageReceived(reference, ctx.channel(), profileName, remoteAddress, remainingMessageSize); + } finally { + // Set the expected position of the buffer, no matter what happened + buffer.readerIndex(expectedReaderIndex); + } + } + + @Override + 
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + transport.exceptionCaught(ctx, cause); + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4OpenChannelsHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4OpenChannelsHandler.java new file mode 100644 index 00000000000..0562a0d4661 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4OpenChannelsHandler.java @@ -0,0 +1,98 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.transport.netty4; + +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelFutureListener; +import io.netty.channel.ChannelHandler; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.metrics.CounterMetric; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; + +@ChannelHandler.Sharable +public class Netty4OpenChannelsHandler extends ChannelInboundHandlerAdapter implements Releasable { + + final Set openChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); + final CounterMetric openChannelsMetric = new CounterMetric(); + final CounterMetric totalChannelsMetric = new CounterMetric(); + + final ESLogger logger; + + public Netty4OpenChannelsHandler(ESLogger logger) { + this.logger = logger; + } + + final ChannelFutureListener remover = new ChannelFutureListener() { + @Override + public void operationComplete(ChannelFuture future) throws Exception { + boolean removed = openChannels.remove(future.channel()); + if (removed) { + openChannelsMetric.dec(); + } + if (logger.isTraceEnabled()) { + logger.trace("channel closed: {}", future.channel()); + } + } + }; + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + if (logger.isTraceEnabled()) { + logger.trace("channel opened: {}", ctx.channel()); + } + final boolean added = openChannels.add(ctx.channel()); + if (added) { + openChannelsMetric.inc(); + totalChannelsMetric.inc(); + ctx.channel().closeFuture().addListener(remover); + } + + super.channelActive(ctx); + } + + public long numberOfOpenChannels() { + return openChannelsMetric.count(); + } + + public long 
totalChannels() { + return totalChannelsMetric.count(); + } + + @Override + public void close() { + try { + Netty4Utils.closeChannels(openChannels); + } catch (IOException e) { + logger.trace("exception while closing channels", e); + } + openChannels.clear(); + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoder.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoder.java new file mode 100644 index 00000000000..144c036f08a --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoder.java @@ -0,0 +1,50 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.transport.netty4; + +import io.netty.buffer.ByteBuf; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.ByteToMessageDecoder; +import io.netty.handler.codec.TooLongFrameException; +import org.elasticsearch.transport.TcpHeader; +import org.elasticsearch.transport.TcpTransport; + +import java.util.List; + +final class Netty4SizeHeaderFrameDecoder extends ByteToMessageDecoder { + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf in, List out) throws Exception { + try { + boolean continueProcessing = TcpTransport.validateMessageHeader(Netty4Utils.toBytesReference(in)); + final ByteBuf message = in.skipBytes(TcpHeader.MARKER_BYTES_SIZE + TcpHeader.MESSAGE_LENGTH_SIZE); + if (!continueProcessing) return; + out.add(message); + } catch (IllegalArgumentException ex) { + throw new TooLongFrameException(ex); + } catch (IllegalStateException ex) { + /* decode will be called until the ByteBuf is fully consumed; when it is fully + * consumed, transport#validateMessageHeader will throw an IllegalStateException which + * is okay, it means we have finished consuming the ByteBuf and we can get out + */ + } + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java new file mode 100644 index 00000000000..c1b2ef10211 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java @@ -0,0 +1,506 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.transport.netty4; + +import io.netty.bootstrap.Bootstrap; +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.AdaptiveRecvByteBufAllocator; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelFutureListener; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.FixedRecvByteBufAllocator; +import io.netty.channel.RecvByteBufAllocator; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.oio.OioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.channel.socket.nio.NioSocketChannel; +import io.netty.channel.socket.oio.OioServerSocketChannel; +import io.netty.channel.socket.oio.OioSocketChannel; +import io.netty.util.concurrent.Future; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.lease.Releasables; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.network.NetworkService.TcpSettings; +import 
org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.FutureUtils; +import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.monitor.jvm.JvmInfo; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.TransportServiceAdapter; +import org.elasticsearch.transport.TransportSettings; + +import java.io.IOException; +import java.net.InetSocketAddress; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.common.settings.Setting.byteSizeSetting; +import static org.elasticsearch.common.settings.Setting.intSetting; +import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap; +import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; + +/** + * There are 4 types of connections per node, low/med/high/ping. Low if for batch oriented APIs (like recovery or + * batch) with high payload that will cause regular request. (like search or single index) to take + * longer. Med is for the typical search / single doc index. And High for things like cluster state. Ping is reserved for + * sending out ping requests to other nodes. 
+ */ +public class Netty4Transport extends TcpTransport { + + static { + Netty4Utils.setup(); + } + + public static final Setting WORKER_COUNT = + new Setting<>("transport.netty.worker_count", + (s) -> Integer.toString(EsExecutors.boundedNumberOfProcessors(s) * 2), + (s) -> Setting.parseInt(s, 1, "transport.netty.worker_count"), Property.NodeScope, Property.Shared); + + public static final Setting NETTY_MAX_CUMULATION_BUFFER_CAPACITY = + Setting.byteSizeSetting( + "transport.netty.max_cumulation_buffer_capacity", + new ByteSizeValue(-1), + Property.NodeScope, + Property.Shared); + public static final Setting NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = + Setting.intSetting("transport.netty.max_composite_buffer_components", -1, -1, Property.NodeScope, Property.Shared); + + // See AdaptiveReceiveBufferSizePredictor#DEFAULT_XXX for default values in netty..., we can use higher ones for us, even fixed one + public static final Setting NETTY_RECEIVE_PREDICTOR_SIZE = Setting.byteSizeSetting( + "transport.netty.receive_predictor_size", + settings -> { + long defaultReceiverPredictor = 512 * 1024; + if (JvmInfo.jvmInfo().getMem().getDirectMemoryMax().bytes() > 0) { + // we can guess a better default... 
+ long l = (long) ((0.3 * JvmInfo.jvmInfo().getMem().getDirectMemoryMax().bytes()) / WORKER_COUNT.get(settings)); + defaultReceiverPredictor = Math.min(defaultReceiverPredictor, Math.max(l, 64 * 1024)); + } + return new ByteSizeValue(defaultReceiverPredictor).toString(); + }, + Property.NodeScope, + Property.Shared); + public static final Setting NETTY_RECEIVE_PREDICTOR_MIN = + byteSizeSetting("transport.netty.receive_predictor_min", NETTY_RECEIVE_PREDICTOR_SIZE, Property.NodeScope, Property.Shared); + public static final Setting NETTY_RECEIVE_PREDICTOR_MAX = + byteSizeSetting("transport.netty.receive_predictor_max", NETTY_RECEIVE_PREDICTOR_SIZE, Property.NodeScope, Property.Shared); + public static final Setting NETTY_BOSS_COUNT = + intSetting("transport.netty.boss_count", 1, 1, Property.NodeScope, Property.Shared); + + + protected final ByteSizeValue maxCumulationBufferCapacity; + protected final int maxCompositeBufferComponents; + protected final RecvByteBufAllocator recvByteBufAllocator; + protected final int workerCount; + protected final ByteSizeValue receivePredictorMin; + protected final ByteSizeValue receivePredictorMax; + // package private for testing + volatile Netty4OpenChannelsHandler serverOpenChannels; + protected volatile Bootstrap bootstrap; + protected final Map serverBootstraps = newConcurrentMap(); + + @Inject + public Netty4Transport(Settings settings, ThreadPool threadPool, NetworkService networkService, BigArrays bigArrays, + NamedWriteableRegistry namedWriteableRegistry, CircuitBreakerService circuitBreakerService) { + super("netty", settings, threadPool, bigArrays, circuitBreakerService, namedWriteableRegistry, networkService); + this.workerCount = WORKER_COUNT.get(settings); + this.maxCumulationBufferCapacity = NETTY_MAX_CUMULATION_BUFFER_CAPACITY.get(settings); + this.maxCompositeBufferComponents = NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS.get(settings); + + // See AdaptiveReceiveBufferSizePredictor#DEFAULT_XXX for default values in 
netty..., we can use higher ones for us, even fixed one + this.receivePredictorMin = NETTY_RECEIVE_PREDICTOR_MIN.get(settings); + this.receivePredictorMax = NETTY_RECEIVE_PREDICTOR_MAX.get(settings); + if (receivePredictorMax.bytes() == receivePredictorMin.bytes()) { + recvByteBufAllocator = new FixedRecvByteBufAllocator((int) receivePredictorMax.bytes()); + } else { + recvByteBufAllocator = new AdaptiveRecvByteBufAllocator((int) receivePredictorMin.bytes(), + (int) receivePredictorMin.bytes(), (int) receivePredictorMax.bytes()); + } + } + + TransportServiceAdapter transportServiceAdapter() { + return transportServiceAdapter; + } + + @Override + protected void doStart() { + boolean success = false; + try { + bootstrap = createBootstrap(); + if (NetworkService.NETWORK_SERVER.get(settings)) { + final Netty4OpenChannelsHandler openChannels = new Netty4OpenChannelsHandler(logger); + this.serverOpenChannels = openChannels; + // loop through all profiles and start them up, special handling for default one + for (Map.Entry entry : buildProfileSettings().entrySet()) { + // merge fallback settings with default settings with profile settings so we have complete settings with default values + final Settings settings = Settings.builder() + .put(createFallbackSettings()) + .put(entry.getValue()).build(); + createServerBootstrap(entry.getKey(), settings); + bindServer(entry.getKey(), settings); + } + } + super.doStart(); + success = true; + } finally { + if (success == false) { + doStop(); + } + } + } + + private Bootstrap createBootstrap() { + final Bootstrap bootstrap = new Bootstrap(); + if (TCP_BLOCKING_CLIENT.get(settings)) { + bootstrap.group(new OioEventLoopGroup(1, daemonThreadFactory(settings, TRANSPORT_CLIENT_WORKER_THREAD_NAME_PREFIX))); + bootstrap.channel(OioSocketChannel.class); + } else { + bootstrap.group(new NioEventLoopGroup(workerCount, daemonThreadFactory(settings, TRANSPORT_CLIENT_BOSS_THREAD_NAME_PREFIX))); + bootstrap.channel(NioSocketChannel.class); + } + 
+ bootstrap.handler(new ChannelInitializer() { + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + ch.pipeline().addLast("size", new Netty4SizeHeaderFrameDecoder()); + // using a dot as a prefix means this cannot come from any settings parsed + ch.pipeline().addLast("dispatcher", new Netty4MessageChannelHandler(Netty4Transport.this, ".client")); + } + + }); + + bootstrap.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, Math.toIntExact(connectTimeout.millis())); + bootstrap.option(ChannelOption.TCP_NODELAY, TCP_NO_DELAY.get(settings)); + bootstrap.option(ChannelOption.SO_KEEPALIVE, TCP_KEEP_ALIVE.get(settings)); + + final ByteSizeValue tcpSendBufferSize = TCP_SEND_BUFFER_SIZE.get(settings); + if (tcpSendBufferSize.bytes() > 0) { + bootstrap.option(ChannelOption.SO_SNDBUF, Math.toIntExact(tcpSendBufferSize.bytes())); + } + + final ByteSizeValue tcpReceiveBufferSize = TCP_RECEIVE_BUFFER_SIZE.get(settings); + if (tcpReceiveBufferSize.bytes() > 0) { + bootstrap.option(ChannelOption.SO_RCVBUF, Math.toIntExact(tcpReceiveBufferSize.bytes())); + } + + bootstrap.option(ChannelOption.RCVBUF_ALLOCATOR, recvByteBufAllocator); + + final boolean reuseAddress = TCP_REUSE_ADDRESS.get(settings); + bootstrap.option(ChannelOption.SO_REUSEADDR, reuseAddress); + + bootstrap.validate(); + + return bootstrap; + } + + private Settings createFallbackSettings() { + Settings.Builder fallbackSettingsBuilder = Settings.builder(); + + List fallbackBindHost = TransportSettings.BIND_HOST.get(settings); + if (fallbackBindHost.isEmpty() == false) { + fallbackSettingsBuilder.putArray("bind_host", fallbackBindHost); + } + + List fallbackPublishHost = TransportSettings.PUBLISH_HOST.get(settings); + if (fallbackPublishHost.isEmpty() == false) { + fallbackSettingsBuilder.putArray("publish_host", fallbackPublishHost); + } + + boolean fallbackTcpNoDelay = settings.getAsBoolean("transport.netty.tcp_no_delay", TcpSettings.TCP_NO_DELAY.get(settings)); + 
fallbackSettingsBuilder.put("tcp_no_delay", fallbackTcpNoDelay); + + boolean fallbackTcpKeepAlive = settings.getAsBoolean("transport.netty.tcp_keep_alive", TcpSettings.TCP_KEEP_ALIVE.get(settings)); + fallbackSettingsBuilder.put("tcp_keep_alive", fallbackTcpKeepAlive); + + boolean fallbackReuseAddress = settings.getAsBoolean("transport.netty.reuse_address", TcpSettings.TCP_REUSE_ADDRESS.get(settings)); + fallbackSettingsBuilder.put("reuse_address", fallbackReuseAddress); + + ByteSizeValue fallbackTcpSendBufferSize = settings.getAsBytesSize("transport.netty.tcp_send_buffer_size", + TCP_SEND_BUFFER_SIZE.get(settings)); + if (fallbackTcpSendBufferSize.bytes() >= 0) { + fallbackSettingsBuilder.put("tcp_send_buffer_size", fallbackTcpSendBufferSize); + } + + ByteSizeValue fallbackTcpBufferSize = settings.getAsBytesSize("transport.netty.tcp_receive_buffer_size", + TCP_RECEIVE_BUFFER_SIZE.get(settings)); + if (fallbackTcpBufferSize.bytes() >= 0) { + fallbackSettingsBuilder.put("tcp_receive_buffer_size", fallbackTcpBufferSize); + } + + return fallbackSettingsBuilder.build(); + } + + private void createServerBootstrap(String name, Settings settings) { + if (logger.isDebugEnabled()) { + logger.debug("using profile[{}], worker_count[{}], port[{}], bind_host[{}], publish_host[{}], compress[{}], " + + "connect_timeout[{}], connections_per_node[{}/{}/{}/{}/{}], receive_predictor[{}->{}]", + name, workerCount, settings.get("port"), settings.get("bind_host"), settings.get("publish_host"), compress, + connectTimeout, connectionsPerNodeRecovery, connectionsPerNodeBulk, connectionsPerNodeReg, connectionsPerNodeState, + connectionsPerNodePing, receivePredictorMin, receivePredictorMax); + } + + final ThreadFactory workerFactory = daemonThreadFactory(this.settings, HTTP_SERVER_WORKER_THREAD_NAME_PREFIX, name); + + final ServerBootstrap serverBootstrap = new ServerBootstrap(); + + if (TCP_BLOCKING_SERVER.get(settings)) { + serverBootstrap.group(new OioEventLoopGroup(workerCount, 
workerFactory)); + serverBootstrap.channel(OioServerSocketChannel.class); + } else { + serverBootstrap.group(new NioEventLoopGroup(workerCount, workerFactory)); + serverBootstrap.channel(NioServerSocketChannel.class); + } + + serverBootstrap.childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel ch) throws Exception { + ch.pipeline().addLast("open_channels", Netty4Transport.this.serverOpenChannels); + ch.pipeline().addLast("size", new Netty4SizeHeaderFrameDecoder()); + ch.pipeline().addLast("dispatcher", new Netty4MessageChannelHandler(Netty4Transport.this, name)); + } + }); + + serverBootstrap.childOption(ChannelOption.TCP_NODELAY, TCP_NO_DELAY.get(settings)); + serverBootstrap.childOption(ChannelOption.SO_KEEPALIVE, TCP_KEEP_ALIVE.get(settings)); + + final ByteSizeValue tcpSendBufferSize = TCP_SEND_BUFFER_SIZE.getDefault(settings); + if (tcpSendBufferSize != null && tcpSendBufferSize.bytes() > 0) { + serverBootstrap.childOption(ChannelOption.SO_SNDBUF, Math.toIntExact(tcpSendBufferSize.bytes())); + } + + final ByteSizeValue tcpReceiveBufferSize = TCP_RECEIVE_BUFFER_SIZE.getDefault(settings); + if (tcpReceiveBufferSize != null && tcpReceiveBufferSize.bytes() > 0) { + serverBootstrap.childOption(ChannelOption.SO_RCVBUF, Math.toIntExact(tcpReceiveBufferSize.bytesAsInt())); + } + + serverBootstrap.option(ChannelOption.RCVBUF_ALLOCATOR, recvByteBufAllocator); + serverBootstrap.childOption(ChannelOption.RCVBUF_ALLOCATOR, recvByteBufAllocator); + + final boolean reuseAddress = TCP_REUSE_ADDRESS.get(settings); + serverBootstrap.option(ChannelOption.SO_REUSEADDR, reuseAddress); + serverBootstrap.childOption(ChannelOption.SO_REUSEADDR, reuseAddress); + + serverBootstrap.validate(); + + serverBootstraps.put(name, serverBootstrap); + } + + protected final void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + final Throwable unwrapped = ExceptionsHelper.unwrap(cause, ElasticsearchException.class); + final 
Throwable t = unwrapped != null ? unwrapped : cause; + onException(ctx.channel(), t instanceof Exception ? (Exception) t : new ElasticsearchException(t)); + } + + @Override + public long serverOpen() { + Netty4OpenChannelsHandler channels = serverOpenChannels; + return channels == null ? 0 : channels.numberOfOpenChannels(); + } + + protected NodeChannels connectToChannelsLight(DiscoveryNode node) { + InetSocketAddress address = ((InetSocketTransportAddress) node.getAddress()).address(); + ChannelFuture connect = bootstrap.connect(address); + connect.awaitUninterruptibly((long) (connectTimeout.millis() * 1.5)); + if (!connect.isSuccess()) { + throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connect.cause()); + } + Channel[] channels = new Channel[1]; + channels[0] = connect.channel(); + channels[0].closeFuture().addListener(new ChannelCloseListener(node)); + return new NodeChannels(channels, channels, channels, channels, channels); + } + + protected NodeChannels connectToChannels(DiscoveryNode node) { + final NodeChannels nodeChannels = + new NodeChannels( + new Channel[connectionsPerNodeRecovery], + new Channel[connectionsPerNodeBulk], + new Channel[connectionsPerNodeReg], + new Channel[connectionsPerNodeState], + new Channel[connectionsPerNodePing]); + boolean success = false; + try { + int numConnections = + connectionsPerNodeRecovery + + connectionsPerNodeBulk + + connectionsPerNodeReg + + connectionsPerNodeState + + connectionsPerNodePing; + final ArrayList connections = new ArrayList<>(numConnections); + final InetSocketAddress address = ((InetSocketTransportAddress) node.getAddress()).address(); + for (int i = 0; i < numConnections; i++) { + connections.add(bootstrap.connect(address)); + } + final Iterator iterator = connections.iterator(); + try { + for (Channel[] channels : nodeChannels.getChannelArrays()) { + for (int i = 0; i < channels.length; i++) { + assert iterator.hasNext(); + ChannelFuture future = 
iterator.next(); + future.awaitUninterruptibly((long) (connectTimeout.millis() * 1.5)); + if (!future.isSuccess()) { + throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", future.cause()); + } + channels[i] = future.channel(); + channels[i].closeFuture().addListener(new ChannelCloseListener(node)); + } + } + if (nodeChannels.recovery.length == 0) { + if (nodeChannels.bulk.length > 0) { + nodeChannels.recovery = nodeChannels.bulk; + } else { + nodeChannels.recovery = nodeChannels.reg; + } + } + if (nodeChannels.bulk.length == 0) { + nodeChannels.bulk = nodeChannels.reg; + } + } catch (final RuntimeException e) { + for (final ChannelFuture future : Collections.unmodifiableList(connections)) { + FutureUtils.cancel(future); + if (future.channel() != null && future.channel().isOpen()) { + try { + future.channel().close(); + } catch (Exception inner) { + e.addSuppressed(inner); + } + } + } + throw e; + } + success = true; + } finally { + if (success == false) { + try { + nodeChannels.close(); + } catch (IOException e) { + logger.trace("exception while closing channels", e); + } + } + } + return nodeChannels; + } + + private class ChannelCloseListener implements ChannelFutureListener { + + private final DiscoveryNode node; + + private ChannelCloseListener(DiscoveryNode node) { + this.node = node; + } + + @Override + public void operationComplete(final ChannelFuture future) throws Exception { + NodeChannels nodeChannels = connectedNodes.get(node); + if (nodeChannels != null && nodeChannels.hasChannel(future.channel())) { + threadPool.generic().execute(() -> disconnectFromNode(node, future.channel(), "channel closed event")); + } + } + } + + @Override + protected void sendMessage(Channel channel, BytesReference reference, Runnable sendListener, boolean close) { + final ChannelFuture future = channel.writeAndFlush(Netty4Utils.toByteBuf(reference)); + if (close) { + future.addListener(f -> { + try { + sendListener.run(); + } finally { + 
future.channel().close(); + } + }); + } else { + future.addListener(f -> sendListener.run()); + } + } + + @Override + protected void closeChannels(final List channels) throws IOException { + Netty4Utils.closeChannels(channels); + } + + @Override + protected InetSocketAddress getLocalAddress(Channel channel) { + return (InetSocketAddress) channel.localAddress(); + } + + @Override + protected Channel bind(String name, InetSocketAddress address) { + return serverBootstraps.get(name).bind(address).syncUninterruptibly().channel(); + } + + ScheduledPing getPing() { + return scheduledPing; + } + + @Override + protected boolean isOpen(Channel channel) { + return channel.isOpen(); + } + + @Override + @SuppressForbidden(reason = "debug") + protected void stopInternal() { + Releasables.close(serverOpenChannels, () -> { + final List>> serverBootstrapCloseFutures = new ArrayList<>(serverBootstraps.size()); + for (final Map.Entry entry : serverBootstraps.entrySet()) { + serverBootstrapCloseFutures.add( + Tuple.tuple(entry.getKey(), entry.getValue().config().group().shutdownGracefully(0, 5, TimeUnit.SECONDS))); + } + for (final Tuple> future : serverBootstrapCloseFutures) { + future.v2().awaitUninterruptibly(); + if (!future.v2().isSuccess()) { + logger.debug("Error closing server bootstrap for profile [{}]", future.v2().cause(), future.v1()); + } + } + serverBootstraps.clear(); + + if (bootstrap != null) { + bootstrap.config().group().shutdownGracefully(0, 5, TimeUnit.SECONDS).awaitUninterruptibly(); + bootstrap = null; + } + }); + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java new file mode 100644 index 00000000000..53cf1b329aa --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java @@ -0,0 +1,121 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * 
license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.transport.netty4; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.CompositeByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.util.internal.logging.InternalLogger; +import io.netty.util.internal.logging.InternalLoggerFactory; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefIterator; +import org.elasticsearch.common.bytes.BytesReference; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +public class Netty4Utils { + + static { + InternalLoggerFactory.setDefaultFactory(new InternalLoggerFactory() { + + @Override + public InternalLogger newInstance(final String name) { + return new Netty4InternalESLogger(name.replace("io.netty.", "netty.")); + } + + }); + } + + public static void setup() { + + } + + /** + * Turns the given BytesReference into a ByteBuf. Note: the returned ByteBuf will reference the internal + * pages of the BytesReference. Don't free the bytes of reference before the ByteBuf goes out of scope. 
+ */ + public static ByteBuf toByteBuf(final BytesReference reference) { + if (reference.length() == 0) { + return Unpooled.EMPTY_BUFFER; + } + if (reference instanceof ByteBufBytesReference) { + return ((ByteBufBytesReference) reference).toByteBuf(); + } else { + final BytesRefIterator iterator = reference.iterator(); + // usually we have one, two, or three components + // from the header, the message, and a buffer + final List buffers = new ArrayList<>(3); + try { + BytesRef slice; + while ((slice = iterator.next()) != null) { + buffers.add(Unpooled.wrappedBuffer(slice.bytes, slice.offset, slice.length)); + } + final CompositeByteBuf composite = Unpooled.compositeBuffer(buffers.size()); + composite.addComponents(true, buffers); + return composite; + } catch (IOException ex) { + throw new AssertionError("no IO happens here", ex); + } + } + } + + /** + * Wraps the given ChannelBuffer with a BytesReference + */ + public static BytesReference toBytesReference(final ByteBuf buffer) { + return toBytesReference(buffer, buffer.readableBytes()); + } + + /** + * Wraps the given ChannelBuffer with a BytesReference of a given size + */ + static BytesReference toBytesReference(final ByteBuf buffer, final int size) { + return new ByteBufBytesReference(buffer, size); + } + + public static void closeChannels(final Collection channels) throws IOException { + IOException closingExceptions = null; + final List futures = new ArrayList<>(); + for (final Channel channel : channels) { + try { + if (channel != null && channel.isOpen()) { + futures.add(channel.close()); + } + } catch (Exception e) { + if (closingExceptions == null) { + closingExceptions = new IOException("failed to close channels"); + } + closingExceptions.addSuppressed(e); + } + } + for (final ChannelFuture future : futures) { + future.awaitUninterruptibly(); + } + + if (closingExceptions != null) { + throw closingExceptions; + } + } + +} diff --git 
a/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy b/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy new file mode 100644 index 00000000000..45c8cd923aa --- /dev/null +++ b/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy @@ -0,0 +1,24 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +grant { + // Netty SelectorUtil wants to change this, because of https://bugs.openjdk.java.net/browse/JDK-6427854 + // the bug says it only happened rarely, and that its fixed, but apparently it still happens rarely! + permission java.util.PropertyPermission "sun.nio.ch.bugLevel", "write"; +}; \ No newline at end of file diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/ESNetty4IntegTestCase.java b/modules/transport-netty4/src/test/java/org/elasticsearch/ESNetty4IntegTestCase.java new file mode 100644 index 00000000000..1feded41ed5 --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/ESNetty4IntegTestCase.java @@ -0,0 +1,72 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch; + +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.transport.Netty4Plugin; +import org.elasticsearch.transport.netty4.Netty4Transport; + +import java.util.Collection; + +@ESIntegTestCase.SuppressLocalMode +public abstract class ESNetty4IntegTestCase extends ESIntegTestCase { + + @Override + protected boolean ignoreExternalCluster() { + return true; + } + + @Override + protected boolean addMockTransportService() { + return false; + } + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal)); + // randomize netty settings + if (randomBoolean()) { + builder.put(Netty4Transport.WORKER_COUNT.getKey(), random().nextInt(3) + 1); + } + builder.put(NetworkModule.TRANSPORT_TYPE_KEY, Netty4Plugin.NETTY_TRANSPORT_NAME); + builder.put(NetworkModule.HTTP_TYPE_KEY, Netty4Plugin.NETTY_HTTP_TRANSPORT_NAME); + return builder.build(); + } + + @Override + protected Settings transportClientSettings() { + Settings.Builder builder = Settings.builder().put(super.transportClientSettings()); + builder.put(NetworkModule.TRANSPORT_TYPE_KEY, Netty4Plugin.NETTY_TRANSPORT_NAME); + return builder.build(); + } + + 
@Override + protected Collection> nodePlugins() { + return pluginList(Netty4Plugin.class); + } + + @Override + protected Collection> transportClientPlugins() { + return pluginList(Netty4Plugin.class); + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java new file mode 100644 index 00000000000..58c6a476fe8 --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java @@ -0,0 +1,490 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.http.netty4; + +import io.netty.buffer.ByteBufAllocator; +import io.netty.buffer.Unpooled; +import io.netty.channel.Channel; +import io.netty.channel.ChannelConfig; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelId; +import io.netty.channel.ChannelMetadata; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.ChannelProgressivePromise; +import io.netty.channel.ChannelPromise; +import io.netty.channel.EventLoop; +import io.netty.handler.codec.http.DefaultFullHttpRequest; +import io.netty.handler.codec.http.FullHttpRequest; +import io.netty.handler.codec.http.FullHttpResponse; +import io.netty.handler.codec.http.HttpHeaderNames; +import io.netty.handler.codec.http.HttpMethod; +import io.netty.handler.codec.http.HttpResponse; +import io.netty.handler.codec.http.HttpVersion; +import io.netty.util.Attribute; +import io.netty.util.AttributeKey; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.http.HttpTransportSettings; +import org.elasticsearch.http.netty4.cors.Netty4CorsHandler; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.netty4.Netty4Utils; +import org.junit.After; +import org.junit.Before; + +import java.net.SocketAddress; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_METHODS; +import static 
org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_ORIGIN; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ENABLED; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class Netty4HttpChannelTests extends ESTestCase { + + private NetworkService networkService; + private ThreadPool threadPool; + private MockBigArrays bigArrays; + + @Before + public void setup() throws Exception { + networkService = new NetworkService(Settings.EMPTY); + threadPool = new TestThreadPool("test"); + bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()); + } + + @After + public void shutdown() throws Exception { + if (threadPool != null) { + threadPool.shutdownNow(); + } + } + + public void testResponse() { + final FullHttpResponse response = executeRequest(Settings.EMPTY, "request-host"); + assertThat(response.content(), equalTo(Netty4Utils.toByteBuf(new TestResponse().content()))); + } + + public void testCorsEnabledWithoutAllowOrigins() { + // Set up a HTTP transport with only the CORS enabled setting + Settings settings = Settings.builder() + .put(HttpTransportSettings.SETTING_CORS_ENABLED.getKey(), true) + .build(); + HttpResponse response = executeRequest(settings, "remote-host", "request-host"); + // inspect response and validate + assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), nullValue()); + } + + public void testCorsEnabledWithAllowOrigins() { + final String originValue = "remote-host"; + // create a http transport with CORS enabled and allow origin configured + Settings settings = Settings.builder() + .put(SETTING_CORS_ENABLED.getKey(), true) + .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), originValue) + .build(); + HttpResponse response = executeRequest(settings, originValue, "request-host"); + // inspect response and validate + 
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue()); + String allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN); + assertThat(allowedOrigins, is(originValue)); + } + + public void testCorsAllowOriginWithSameHost() { + String originValue = "remote-host"; + String host = "remote-host"; + // create a http transport with CORS enabled + Settings settings = Settings.builder() + .put(SETTING_CORS_ENABLED.getKey(), true) + .build(); + HttpResponse response = executeRequest(settings, originValue, host); + // inspect response and validate + assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue()); + String allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN); + assertThat(allowedOrigins, is(originValue)); + + originValue = "http://" + originValue; + response = executeRequest(settings, originValue, host); + assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue()); + allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN); + assertThat(allowedOrigins, is(originValue)); + + originValue = originValue + ":5555"; + host = host + ":5555"; + response = executeRequest(settings, originValue, host); + assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue()); + allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN); + assertThat(allowedOrigins, is(originValue)); + + originValue = originValue.replace("http", "https"); + response = executeRequest(settings, originValue, host); + assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue()); + allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN); + assertThat(allowedOrigins, is(originValue)); + } + + public void testThatStringLiteralWorksOnMatch() { + final String originValue = "remote-host"; + 
Settings settings = Settings.builder() + .put(SETTING_CORS_ENABLED.getKey(), true) + .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), originValue) + .put(SETTING_CORS_ALLOW_METHODS.getKey(), "get, options, post") + .put(SETTING_CORS_ALLOW_CREDENTIALS.getKey(), true) + .build(); + HttpResponse response = executeRequest(settings, originValue, "request-host"); + // inspect response and validate + assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue()); + String allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN); + assertThat(allowedOrigins, is(originValue)); + assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_CREDENTIALS), equalTo("true")); + } + + public void testThatAnyOriginWorks() { + final String originValue = Netty4CorsHandler.ANY_ORIGIN; + Settings settings = Settings.builder() + .put(SETTING_CORS_ENABLED.getKey(), true) + .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), originValue) + .build(); + HttpResponse response = executeRequest(settings, originValue, "request-host"); + // inspect response and validate + assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue()); + String allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN); + assertThat(allowedOrigins, is(originValue)); + assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_CREDENTIALS), nullValue()); + } + + public void testHeadersSet() { + Settings settings = Settings.builder().build(); + try (Netty4HttpServerTransport httpServerTransport = + new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool)) { + httpServerTransport.start(); + final FullHttpRequest httpRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/"); + httpRequest.headers().add(HttpHeaderNames.ORIGIN, "remote"); + final WriteCapturingChannel writeCapturingChannel = new WriteCapturingChannel(); + Netty4HttpRequest request = new 
Netty4HttpRequest(httpRequest, writeCapturingChannel); + + // send a response + Netty4HttpChannel channel = + new Netty4HttpChannel(httpServerTransport, request, null, randomBoolean(), threadPool.getThreadContext()); + TestResponse resp = new TestResponse(); + final String customHeader = "custom-header"; + final String customHeaderValue = "xyz"; + resp.addHeader(customHeader, customHeaderValue); + channel.sendResponse(resp); + + // inspect what was written + List writtenObjects = writeCapturingChannel.getWrittenObjects(); + assertThat(writtenObjects.size(), is(1)); + HttpResponse response = (HttpResponse) writtenObjects.get(0); + assertThat(response.headers().get("non-existent-header"), nullValue()); + assertThat(response.headers().get(customHeader), equalTo(customHeaderValue)); + assertThat(response.headers().get(HttpHeaderNames.CONTENT_LENGTH), equalTo(Integer.toString(resp.content().length()))); + assertThat(response.headers().get(HttpHeaderNames.CONTENT_TYPE), equalTo(resp.contentType())); + } + } + + private FullHttpResponse executeRequest(final Settings settings, final String host) { + return executeRequest(settings, null, host); + } + + private FullHttpResponse executeRequest(final Settings settings, final String originValue, final String host) { + // construct request and send it over the transport layer + try (Netty4HttpServerTransport httpServerTransport = + new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool)) { + httpServerTransport.start(); + final FullHttpRequest httpRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/"); + if (originValue != null) { + httpRequest.headers().add(HttpHeaderNames.ORIGIN, originValue); + } + httpRequest.headers().add(HttpHeaderNames.HOST, host); + final WriteCapturingChannel writeCapturingChannel = new WriteCapturingChannel(); + final Netty4HttpRequest request = new Netty4HttpRequest(httpRequest, writeCapturingChannel); + + Netty4HttpChannel channel = + new 
Netty4HttpChannel(httpServerTransport, request, null, randomBoolean(), threadPool.getThreadContext()); + channel.sendResponse(new TestResponse()); + + // get the response + List writtenObjects = writeCapturingChannel.getWrittenObjects(); + assertThat(writtenObjects.size(), is(1)); + return (FullHttpResponse) writtenObjects.get(0); + } + } + + private static class WriteCapturingChannel implements Channel { + + private List writtenObjects = new ArrayList<>(); + + @Override + public ChannelId id() { + return null; + } + + @Override + public EventLoop eventLoop() { + return null; + } + + @Override + public Channel parent() { + return null; + } + + @Override + public ChannelConfig config() { + return null; + } + + @Override + public boolean isOpen() { + return false; + } + + @Override + public boolean isRegistered() { + return false; + } + + @Override + public boolean isActive() { + return false; + } + + @Override + public ChannelMetadata metadata() { + return null; + } + + @Override + public SocketAddress localAddress() { + return null; + } + + @Override + public SocketAddress remoteAddress() { + return null; + } + + @Override + public ChannelFuture closeFuture() { + return null; + } + + @Override + public boolean isWritable() { + return false; + } + + @Override + public long bytesBeforeUnwritable() { + return 0; + } + + @Override + public long bytesBeforeWritable() { + return 0; + } + + @Override + public Unsafe unsafe() { + return null; + } + + @Override + public ChannelPipeline pipeline() { + return null; + } + + @Override + public ByteBufAllocator alloc() { + return null; + } + + @Override + public Channel read() { + return null; + } + + @Override + public Channel flush() { + return null; + } + + @Override + public ChannelFuture bind(SocketAddress localAddress) { + return null; + } + + @Override + public ChannelFuture connect(SocketAddress remoteAddress) { + return null; + } + + @Override + public ChannelFuture connect(SocketAddress remoteAddress, SocketAddress 
localAddress) { + return null; + } + + @Override + public ChannelFuture disconnect() { + return null; + } + + @Override + public ChannelFuture close() { + return null; + } + + @Override + public ChannelFuture deregister() { + return null; + } + + @Override + public ChannelFuture bind(SocketAddress localAddress, ChannelPromise promise) { + return null; + } + + @Override + public ChannelFuture connect(SocketAddress remoteAddress, ChannelPromise promise) { + return null; + } + + @Override + public ChannelFuture connect(SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) { + return null; + } + + @Override + public ChannelFuture disconnect(ChannelPromise promise) { + return null; + } + + @Override + public ChannelFuture close(ChannelPromise promise) { + return null; + } + + @Override + public ChannelFuture deregister(ChannelPromise promise) { + return null; + } + + @Override + public ChannelFuture write(Object msg) { + writtenObjects.add(msg); + return null; + } + + @Override + public ChannelFuture write(Object msg, ChannelPromise promise) { + writtenObjects.add(msg); + return null; + } + + @Override + public ChannelFuture writeAndFlush(Object msg, ChannelPromise promise) { + writtenObjects.add(msg); + return null; + } + + @Override + public ChannelFuture writeAndFlush(Object msg) { + writtenObjects.add(msg); + return null; + } + + @Override + public ChannelPromise newPromise() { + return null; + } + + @Override + public ChannelProgressivePromise newProgressivePromise() { + return null; + } + + @Override + public ChannelFuture newSucceededFuture() { + return null; + } + + @Override + public ChannelFuture newFailedFuture(Throwable cause) { + return null; + } + + @Override + public ChannelPromise voidPromise() { + return null; + } + + @Override + public Attribute attr(AttributeKey key) { + return null; + } + + @Override + public boolean hasAttr(AttributeKey key) { + return false; + } + + @Override + public int compareTo(Channel o) { + return 
0; + } + + List getWrittenObjects() { + return writtenObjects; + } + + } + + private static class TestResponse extends RestResponse { + + @Override + public String contentType() { + return "text"; + } + + @Override + public BytesReference content() { + return Netty4Utils.toBytesReference(Unpooled.copiedBuffer("content", StandardCharsets.UTF_8)); + } + + @Override + public RestStatus status() { + return RestStatus.OK; + } + + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java new file mode 100644 index 00000000000..31e7a6112f9 --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java @@ -0,0 +1,190 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.http.netty4; + +import io.netty.bootstrap.Bootstrap; +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.SimpleChannelInboundHandler; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioSocketChannel; +import io.netty.handler.codec.http.DefaultFullHttpRequest; +import io.netty.handler.codec.http.FullHttpResponse; +import io.netty.handler.codec.http.HttpHeaderNames; +import io.netty.handler.codec.http.HttpMethod; +import io.netty.handler.codec.http.HttpObject; +import io.netty.handler.codec.http.HttpObjectAggregator; +import io.netty.handler.codec.http.HttpRequest; +import io.netty.handler.codec.http.HttpRequestEncoder; +import io.netty.handler.codec.http.HttpResponse; +import io.netty.handler.codec.http.HttpResponseDecoder; +import io.netty.handler.codec.http.HttpVersion; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; + +import java.io.Closeable; +import java.net.SocketAddress; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CountDownLatch; + +import static io.netty.handler.codec.http.HttpHeaderNames.HOST; +import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1; + +/** + * Tiny helper to send http requests over netty. 
+ */ +class Netty4HttpClient implements Closeable { + + static Collection returnHttpResponseBodies(Collection responses) { + List list = new ArrayList<>(responses.size()); + for (FullHttpResponse response : responses) { + list.add(response.content().toString(StandardCharsets.UTF_8)); + } + return list; + } + + static Collection returnOpaqueIds(Collection responses) { + List list = new ArrayList<>(responses.size()); + for (HttpResponse response : responses) { + list.add(response.headers().get("X-Opaque-Id")); + } + return list; + } + + private final Bootstrap clientBootstrap; + + Netty4HttpClient() { + clientBootstrap = new Bootstrap().channel(NioSocketChannel.class).group(new NioEventLoopGroup()); + } + + public Collection get(SocketAddress remoteAddress, String... uris) throws InterruptedException { + Collection requests = new ArrayList<>(uris.length); + for (int i = 0; i < uris.length; i++) { + final HttpRequest httpRequest = new DefaultFullHttpRequest(HTTP_1_1, HttpMethod.GET, uris[i]); + httpRequest.headers().add(HOST, "localhost"); + httpRequest.headers().add("X-Opaque-ID", String.valueOf(i)); + requests.add(httpRequest); + } + return sendRequests(remoteAddress, requests); + } + + @SafeVarargs // Safe not because it doesn't do anything with the type parameters but because it won't leak them into other methods. + public final Collection post(SocketAddress remoteAddress, Tuple... urisAndBodies) + throws InterruptedException { + return processRequestsWithBody(HttpMethod.POST, remoteAddress, urisAndBodies); + } + + @SafeVarargs // Safe not because it doesn't do anything with the type parameters but because it won't leak them into other methods. + public final Collection put(SocketAddress remoteAddress, Tuple... urisAndBodies) + throws InterruptedException { + return processRequestsWithBody(HttpMethod.PUT, remoteAddress, urisAndBodies); + } + + private Collection processRequestsWithBody(HttpMethod method, SocketAddress remoteAddress, Tuple... 
urisAndBodies) throws InterruptedException { + Collection requests = new ArrayList<>(urisAndBodies.length); + for (Tuple uriAndBody : urisAndBodies) { + ByteBuf content = Unpooled.copiedBuffer(uriAndBody.v2(), StandardCharsets.UTF_8); + HttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, method, uriAndBody.v1(), content); + request.headers().add(HttpHeaderNames.HOST, "localhost"); + request.headers().add(HttpHeaderNames.CONTENT_LENGTH, content.readableBytes()); + requests.add(request); + } + return sendRequests(remoteAddress, requests); + } + + private synchronized Collection sendRequests( + final SocketAddress remoteAddress, + final Collection requests) throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(requests.size()); + final Collection content = Collections.synchronizedList(new ArrayList<>(requests.size())); + + clientBootstrap.handler(new CountDownLatchHandler(latch, content)); + + ChannelFuture channelFuture = null; + try { + channelFuture = clientBootstrap.connect(remoteAddress); + channelFuture.sync(); + + for (HttpRequest request : requests) { + channelFuture.channel().writeAndFlush(request); + } + latch.await(); + + } finally { + if (channelFuture != null) { + channelFuture.channel().close().sync(); + } + } + + return content; + } + + @Override + public void close() { + clientBootstrap.config().group().shutdownGracefully().awaitUninterruptibly(); + } + + /** + * helper factory which adds returned data to a list and uses a count down latch to decide when done + */ + private static class CountDownLatchHandler extends ChannelInitializer { + + private final CountDownLatch latch; + private final Collection content; + + CountDownLatchHandler(final CountDownLatch latch, final Collection content) { + this.latch = latch; + this.content = content; + } + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + final int maxContentLength = new ByteSizeValue(100, ByteSizeUnit.MB).bytesAsInt(); + 
ch.pipeline().addLast(new HttpResponseDecoder()); + ch.pipeline().addLast(new HttpRequestEncoder()); + ch.pipeline().addLast(new HttpObjectAggregator(maxContentLength)); + ch.pipeline().addLast(new SimpleChannelInboundHandler() { + @Override + protected void channelRead0(ChannelHandlerContext ctx, HttpObject msg) throws Exception { + final FullHttpResponse response = (FullHttpResponse) msg; + content.add(response.copy()); + latch.countDown(); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + latch.countDown(); + } + }); + } + + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPublishPortTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPublishPortTests.java new file mode 100644 index 00000000000..03e09f28d25 --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPublishPortTests.java @@ -0,0 +1,92 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.http.netty4; + +import org.elasticsearch.common.network.NetworkUtils; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.http.BindHttpException; +import org.elasticsearch.http.HttpTransportSettings; +import org.elasticsearch.test.ESTestCase; + +import java.net.UnknownHostException; +import java.util.ArrayList; +import java.util.List; + +import static java.net.InetAddress.getByName; +import static java.util.Arrays.asList; +import static org.elasticsearch.http.netty4.Netty4HttpServerTransport.resolvePublishPort; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class Netty4HttpPublishPortTests extends ESTestCase { + + public void testHttpPublishPort() throws Exception { + int boundPort = randomIntBetween(9000, 9100); + int otherBoundPort = randomIntBetween(9200, 9300); + + int publishPort = resolvePublishPort(Settings.builder().put(HttpTransportSettings.SETTING_HTTP_PUBLISH_PORT.getKey(), 9080).build(), + randomAddresses(), getByName("127.0.0.2")); + assertThat("Publish port should be explicitly set to 9080", publishPort, equalTo(9080)); + + publishPort = resolvePublishPort(Settings.EMPTY, asList(address("127.0.0.1", boundPort), address("127.0.0.2", otherBoundPort)), + getByName("127.0.0.1")); + assertThat("Publish port should be derived from matched address", publishPort, equalTo(boundPort)); + + publishPort = resolvePublishPort(Settings.EMPTY, asList(address("127.0.0.1", boundPort), address("127.0.0.2", boundPort)), + getByName("127.0.0.3")); + assertThat("Publish port should be derived from unique port of bound addresses", publishPort, equalTo(boundPort)); + + final BindHttpException e = + expectThrows(BindHttpException.class, + () -> resolvePublishPort( + Settings.EMPTY, + asList(address("127.0.0.1", boundPort), address("127.0.0.2", otherBoundPort)), + 
getByName("127.0.0.3"))); + assertThat(e.getMessage(), containsString("Failed to auto-resolve http publish port")); + + publishPort = resolvePublishPort(Settings.EMPTY, asList(address("0.0.0.0", boundPort), address("127.0.0.2", otherBoundPort)), + getByName("127.0.0.1")); + assertThat("Publish port should be derived from matching wildcard address", publishPort, equalTo(boundPort)); + + if (NetworkUtils.SUPPORTS_V6) { + publishPort = resolvePublishPort(Settings.EMPTY, asList(address("0.0.0.0", boundPort), address("127.0.0.2", otherBoundPort)), + getByName("::1")); + assertThat("Publish port should be derived from matching wildcard address", publishPort, equalTo(boundPort)); + } + } + + private InetSocketTransportAddress address(String host, int port) throws UnknownHostException { + return new InetSocketTransportAddress(getByName(host), port); + } + + private InetSocketTransportAddress randomAddress() throws UnknownHostException { + return address("127.0.0." + randomIntBetween(1, 100), randomIntBetween(9200, 9300)); + } + + private List randomAddresses() throws UnknownHostException { + List addresses = new ArrayList<>(); + for (int i = 0; i < randomIntBetween(1, 5); i++) { + addresses.add(randomAddress()); + } + return addresses; + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpRequestSizeLimitIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpRequestSizeLimitIT.java new file mode 100644 index 00000000000..f1e2e922cbd --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpRequestSizeLimitIT.java @@ -0,0 +1,131 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.http.netty4; + +import io.netty.handler.codec.http.FullHttpResponse; +import org.elasticsearch.ESNetty4IntegTestCase; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESIntegTestCase.ClusterScope; +import org.elasticsearch.test.ESIntegTestCase.Scope; +import io.netty.handler.codec.http.HttpResponse; +import io.netty.handler.codec.http.HttpResponseStatus; + +import java.util.Collection; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; + +/** + * This test checks that in-flight requests are limited on HTTP level and that requests that are excluded from limiting can pass. + * + * As the same setting is also used to limit in-flight requests on transport level, we avoid transport messages by forcing + * a single node "cluster". 
We also force test infrastructure to use the node client instead of the transport client for the same reason. + */ +@ClusterScope(scope = Scope.TEST, supportsDedicatedMasters = false, numClientNodes = 0, numDataNodes = 1, transportClientRatio = 0) +public class Netty4HttpRequestSizeLimitIT extends ESNetty4IntegTestCase { + + private static final ByteSizeValue LIMIT = new ByteSizeValue(2, ByteSizeUnit.KB); + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + .put(NetworkModule.HTTP_ENABLED.getKey(), true) + .put(HierarchyCircuitBreakerService.IN_FLIGHT_REQUESTS_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), LIMIT) + .build(); + } + + public void testLimitsInFlightRequests() throws Exception { + ensureGreen(); + + // we use the limit size as a (very) rough indication on how many requests we should sent to hit the limit + int numRequests = LIMIT.bytesAsInt() / 100; + + StringBuilder bulkRequest = new StringBuilder(); + for (int i = 0; i < numRequests; i++) { + bulkRequest.append("{\"index\": {}}"); + bulkRequest.append(System.lineSeparator()); + bulkRequest.append("{ \"field\" : \"value\" }"); + bulkRequest.append(System.lineSeparator()); + } + + @SuppressWarnings("unchecked") + Tuple[] requests = new Tuple[150]; + for (int i = 0; i < requests.length; i++) { + requests[i] = Tuple.tuple("/index/type/_bulk", bulkRequest); + } + + HttpServerTransport httpServerTransport = internalCluster().getInstance(HttpServerTransport.class); + InetSocketTransportAddress inetSocketTransportAddress = (InetSocketTransportAddress) randomFrom(httpServerTransport.boundAddress + ().boundAddresses()); + + try (Netty4HttpClient nettyHttpClient = new Netty4HttpClient()) { + Collection singleResponse = nettyHttpClient.post(inetSocketTransportAddress.address(), requests[0]); + assertThat(singleResponse, hasSize(1)); + assertAtLeastOnceExpectedStatus(singleResponse, HttpResponseStatus.OK); + + Collection 
multipleResponses = nettyHttpClient.post(inetSocketTransportAddress.address(), requests); + assertThat(multipleResponses, hasSize(requests.length)); + assertAtLeastOnceExpectedStatus(multipleResponses, HttpResponseStatus.SERVICE_UNAVAILABLE); + } + } + + public void testDoesNotLimitExcludedRequests() throws Exception { + ensureGreen(); + + @SuppressWarnings("unchecked") + Tuple[] requestUris = new Tuple[1500]; + for (int i = 0; i < requestUris.length; i++) { + requestUris[i] = Tuple.tuple("/_cluster/settings", + "{ \"transient\": {\"indices.ttl.interval\": \"40s\" } }"); + } + + HttpServerTransport httpServerTransport = internalCluster().getInstance(HttpServerTransport.class); + InetSocketTransportAddress inetSocketTransportAddress = (InetSocketTransportAddress) randomFrom(httpServerTransport.boundAddress + ().boundAddresses()); + + try (Netty4HttpClient nettyHttpClient = new Netty4HttpClient()) { + Collection responses = nettyHttpClient.put(inetSocketTransportAddress.address(), requestUris); + assertThat(responses, hasSize(requestUris.length)); + assertAllInExpectedStatus(responses, HttpResponseStatus.OK); + } + } + + private void assertAtLeastOnceExpectedStatus(Collection responses, HttpResponseStatus expectedStatus) { + long countExpectedStatus = responses.stream().filter(r -> r.status().equals(expectedStatus)).count(); + assertThat("Expected at least one request with status [" + expectedStatus + "]", countExpectedStatus, greaterThan(0L)); + } + + private void assertAllInExpectedStatus(Collection responses, HttpResponseStatus expectedStatus) { + long countUnexpectedStatus = responses.stream().filter(r -> r.status().equals(expectedStatus) == false).count(); + assertThat("Expected all requests with status [" + expectedStatus + "] but [" + countUnexpectedStatus + + "] requests had a different one", countUnexpectedStatus, equalTo(0L)); + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java 
b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java new file mode 100644 index 00000000000..d8e61908011 --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java @@ -0,0 +1,260 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.http.netty4; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.channel.ChannelHandler; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; +import io.netty.channel.socket.SocketChannel; +import io.netty.handler.codec.http.DefaultFullHttpResponse; +import io.netty.handler.codec.http.FullHttpRequest; +import io.netty.handler.codec.http.FullHttpResponse; +import io.netty.handler.codec.http.HttpHeaderNames; +import io.netty.handler.codec.http.HttpResponseStatus; +import io.netty.handler.codec.http.HttpVersion; +import io.netty.handler.codec.http.QueryStringDecoder; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.http.netty4.pipelining.HttpPipelinedRequest; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.hasSize; + +/** + * This test just tests, if he pipelining works in general with out any connection the Elasticsearch handler + */ +public class Netty4HttpServerPipeliningTests extends ESTestCase { + private NetworkService networkService; + private ThreadPool threadPool; + 
private MockBigArrays bigArrays; + + @Before + public void setup() throws Exception { + networkService = new NetworkService(Settings.EMPTY); + threadPool = new TestThreadPool("test"); + bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()); + } + + @After + public void shutdown() throws Exception { + if (threadPool != null) { + threadPool.shutdownNow(); + } + } + + public void testThatHttpPipeliningWorksWhenEnabled() throws Exception { + final Settings settings = Settings.builder() + .put("http.pipelining", true) + .put("http.port", "0") + .build(); + try (final HttpServerTransport httpServerTransport = new CustomNettyHttpServerTransport(settings)) { + httpServerTransport.start(); + final InetSocketTransportAddress transportAddress = + (InetSocketTransportAddress) randomFrom(httpServerTransport.boundAddress().boundAddresses()); + + final int numberOfRequests = randomIntBetween(4, 16); + final List requests = new ArrayList<>(numberOfRequests); + for (int i = 0; i < numberOfRequests; i++) { + if (rarely()) { + requests.add("/slow?sleep=" + scaledRandomIntBetween(500, 1000)); + } else { + requests.add("/" + i); + } + } + + try (Netty4HttpClient nettyHttpClient = new Netty4HttpClient()) { + Collection responses = nettyHttpClient.get(transportAddress.address(), requests.toArray(new String[]{})); + Collection responseBodies = Netty4HttpClient.returnHttpResponseBodies(responses); + assertThat(responseBodies, contains(requests.toArray())); + } + } + } + + public void testThatHttpPipeliningCanBeDisabled() throws Exception { + final Settings settings = Settings.builder() + .put("http.pipelining", false) + .put("http.port", "0") + .build(); + try (final HttpServerTransport httpServerTransport = new CustomNettyHttpServerTransport(settings)) { + httpServerTransport.start(); + final InetSocketTransportAddress transportAddress = + (InetSocketTransportAddress) randomFrom(httpServerTransport.boundAddress().boundAddresses()); + + final int numberOfRequests = 
randomIntBetween(4, 16); + final int numberOfSlowRequests = scaledRandomIntBetween(1, numberOfRequests); + final List requests = new ArrayList<>(numberOfRequests); + for (int i = 0; i < numberOfRequests - numberOfSlowRequests; i++) { + requests.add("/" + i); + } + for (int i = 0; i < numberOfSlowRequests; i++) { + requests.add("/slow?sleep=" + sleep(i)); + } + + try (Netty4HttpClient nettyHttpClient = new Netty4HttpClient()) { + Collection responses = nettyHttpClient.get(transportAddress.address(), requests.toArray(new String[]{})); + List responseBodies = new ArrayList<>(Netty4HttpClient.returnHttpResponseBodies(responses)); + // we cannot be sure about the order of the fast requests, but the slow ones should have to be last + assertThat(responseBodies, hasSize(numberOfRequests)); + for (int i = 0; i < numberOfSlowRequests; i++) { + assertThat(responseBodies.get(numberOfRequests - numberOfSlowRequests + i), equalTo("/slow?sleep=" + sleep(i))); + } + } + } + } + + + private int sleep(int index) { + return 500 + 100 * (index + 1); + } + + class CustomNettyHttpServerTransport extends Netty4HttpServerTransport { + + private final ExecutorService executorService = Executors.newCachedThreadPool(); + + CustomNettyHttpServerTransport(final Settings settings) { + super(settings, + Netty4HttpServerPipeliningTests.this.networkService, + Netty4HttpServerPipeliningTests.this.bigArrays, + Netty4HttpServerPipeliningTests.this.threadPool); + } + + @Override + public ChannelHandler configureServerChannelHandler() { + return new CustomHttpChannelHandler(this, executorService, Netty4HttpServerPipeliningTests.this.threadPool.getThreadContext()); + } + + @Override + protected void doClose() { + executorService.shutdown(); + super.doClose(); + } + + } + + private class CustomHttpChannelHandler extends Netty4HttpServerTransport.HttpChannelHandler { + + private final ExecutorService executorService; + + CustomHttpChannelHandler(Netty4HttpServerTransport transport, ExecutorService 
executorService, ThreadContext threadContext) { + super(transport, randomBoolean(), threadContext); + this.executorService = executorService; + } + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + super.initChannel(ch); + ch.pipeline().replace("handler", "handler", new PossiblySlowUpstreamHandler(executorService)); + } + + } + + class PossiblySlowUpstreamHandler extends SimpleChannelInboundHandler { + + private final ExecutorService executorService; + + PossiblySlowUpstreamHandler(ExecutorService executorService) { + this.executorService = executorService; + } + + @Override + protected void channelRead0(ChannelHandlerContext ctx, Object msg) throws Exception { + executorService.submit(new PossiblySlowRunnable(ctx, msg)); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + logger.info("Caught exception", cause); + ctx.channel().close().sync(); + } + + } + + class PossiblySlowRunnable implements Runnable { + + private ChannelHandlerContext ctx; + private HttpPipelinedRequest pipelinedRequest; + private FullHttpRequest fullHttpRequest; + + PossiblySlowRunnable(ChannelHandlerContext ctx, Object msg) { + this.ctx = ctx; + if (msg instanceof HttpPipelinedRequest) { + this.pipelinedRequest = (HttpPipelinedRequest) msg; + } else if (msg instanceof FullHttpRequest) { + this.fullHttpRequest = (FullHttpRequest) msg; + } + } + + @Override + public void run() { + final String uri; + if (pipelinedRequest != null && pipelinedRequest.last() instanceof FullHttpRequest) { + uri = ((FullHttpRequest) pipelinedRequest.last()).uri(); + } else { + uri = fullHttpRequest.uri(); + } + + final ByteBuf buffer = Unpooled.copiedBuffer(uri, StandardCharsets.UTF_8); + + final DefaultFullHttpResponse httpResponse = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK, buffer); + httpResponse.headers().add(HttpHeaderNames.CONTENT_LENGTH, buffer.readableBytes()); + + final 
QueryStringDecoder decoder = new QueryStringDecoder(uri); + + final int timeout = + uri.startsWith("/slow") && decoder.parameters().containsKey("sleep") ? + Integer.valueOf(decoder.parameters().get("sleep").get(0)) : 0; + if (timeout > 0) { + try { + Thread.sleep(timeout); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + if (pipelinedRequest != null) { + ctx.writeAndFlush(pipelinedRequest.createHttpResponse(httpResponse, ctx.channel().newPromise())); + } else { + ctx.writeAndFlush(httpResponse); + } + } + + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java new file mode 100644 index 00000000000..dc63907265c --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java @@ -0,0 +1,91 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.http.netty4; + +import io.netty.handler.codec.http.HttpMethod; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.http.netty4.cors.Netty4CorsConfig; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_HEADERS; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_METHODS; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_ORIGIN; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ENABLED; +import static org.hamcrest.Matchers.equalTo; + +/** + * Tests for the {@link Netty4HttpServerTransport} class. 
+ */ +public class Netty4HttpServerTransportTests extends ESTestCase { + + private NetworkService networkService; + private ThreadPool threadPool; + private MockBigArrays bigArrays; + + @Before + public void setup() throws Exception { + networkService = new NetworkService(Settings.EMPTY); + threadPool = new TestThreadPool("test"); + bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()); + } + + @After + public void shutdown() throws Exception { + if (threadPool != null) { + threadPool.shutdownNow(); + } + threadPool = null; + networkService = null; + bigArrays = null; + } + + public void testCorsConfig() { + final Set methods = new HashSet<>(Arrays.asList("get", "options", "post")); + final Set headers = new HashSet<>(Arrays.asList("Content-Type", "Content-Length")); + final Settings settings = Settings.builder() + .put(SETTING_CORS_ENABLED.getKey(), true) + .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), "*") + .put(SETTING_CORS_ALLOW_METHODS.getKey(), Strings.collectionToCommaDelimitedString(methods)) + .put(SETTING_CORS_ALLOW_HEADERS.getKey(), Strings.collectionToCommaDelimitedString(headers)) + .put(SETTING_CORS_ALLOW_CREDENTIALS.getKey(), true) + .build(); + final Netty4HttpServerTransport transport = new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool); + final Netty4CorsConfig corsConfig = transport.getCorsConfig(); + assertThat(corsConfig.isAnyOriginSupported(), equalTo(true)); + assertThat(corsConfig.allowedRequestHeaders(), equalTo(headers)); + assertThat(corsConfig.allowedRequestMethods().stream().map(HttpMethod::name).collect(Collectors.toSet()), equalTo(methods)); + transport.close(); + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningDisabledIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningDisabledIT.java new file mode 100644 index 00000000000..4ea46c651fc --- /dev/null +++ 
b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningDisabledIT.java @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.http.netty4; + +import io.netty.handler.codec.http.FullHttpResponse; +import org.elasticsearch.ESNetty4IntegTestCase; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.test.ESIntegTestCase.ClusterScope; +import org.elasticsearch.test.ESIntegTestCase.Scope; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Locale; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.hasSize; + +@ClusterScope(scope = Scope.TEST, supportsDedicatedMasters = false, numDataNodes = 1) +public class Netty4PipeliningDisabledIT extends ESNetty4IntegTestCase { + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + 
.put(NetworkModule.HTTP_ENABLED.getKey(), true) + .put("http.pipelining", false) + .build(); + } + + public void testThatNettyHttpServerDoesNotSupportPipelining() throws Exception { + ensureGreen(); + String[] requests = new String[] {"/", "/_nodes/stats", "/", "/_cluster/state", "/", "/_nodes", "/"}; + + HttpServerTransport httpServerTransport = internalCluster().getInstance(HttpServerTransport.class); + TransportAddress[] boundAddresses = httpServerTransport.boundAddress().boundAddresses(); + InetSocketTransportAddress inetSocketTransportAddress = (InetSocketTransportAddress) randomFrom(boundAddresses); + + try (Netty4HttpClient nettyHttpClient = new Netty4HttpClient()) { + Collection responses = nettyHttpClient.get(inetSocketTransportAddress.address(), requests); + assertThat(responses, hasSize(requests.length)); + + List opaqueIds = new ArrayList<>(Netty4HttpClient.returnOpaqueIds(responses)); + + assertResponsesOutOfOrder(opaqueIds); + } + } + + /** + * checks if all responses are there, but also tests that they are out of order because pipelining is disabled + */ + private void assertResponsesOutOfOrder(List opaqueIds) { + String message = String.format(Locale.ROOT, "Expected returned http message ids to be in any order of: %s", opaqueIds); + assertThat(message, opaqueIds, containsInAnyOrder("0", "1", "2", "3", "4", "5", "6")); + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningEnabledIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningEnabledIT.java new file mode 100644 index 00000000000..c2222562d72 --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningEnabledIT.java @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.http.netty4; + +import io.netty.handler.codec.http.FullHttpResponse; +import org.elasticsearch.ESNetty4IntegTestCase; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESIntegTestCase.ClusterScope; +import org.elasticsearch.test.ESIntegTestCase.Scope; + +import java.util.Collection; +import java.util.Locale; + +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; + +@ClusterScope(scope = Scope.TEST, supportsDedicatedMasters = false, numDataNodes = 1) +public class Netty4PipeliningEnabledIT extends ESNetty4IntegTestCase { + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + .put(NetworkModule.HTTP_ENABLED.getKey(), true) + .put("http.pipelining", true) + .build(); + } + + public void testThatNettyHttpServerSupportsPipelining() throws Exception { + String[] requests = new String[]{"/", "/_nodes/stats", "/", "/_cluster/state", "/"}; + + HttpServerTransport httpServerTransport = 
internalCluster().getInstance(HttpServerTransport.class); + TransportAddress[] boundAddresses = httpServerTransport.boundAddress().boundAddresses(); + InetSocketTransportAddress inetSocketTransportAddress = (InetSocketTransportAddress) randomFrom(boundAddresses); + + try (Netty4HttpClient nettyHttpClient = new Netty4HttpClient()) { + Collection responses = nettyHttpClient.get(inetSocketTransportAddress.address(), requests); + assertThat(responses, hasSize(5)); + + Collection opaqueIds = Netty4HttpClient.returnOpaqueIds(responses); + assertOpaqueIdsInOrder(opaqueIds); + } + } + + private void assertOpaqueIdsInOrder(Collection opaqueIds) { + // check if opaque ids are monotonically increasing + int i = 0; + String msg = String.format(Locale.ROOT, "Expected list of opaque ids to be monotonically increasing, got [%s]", opaqueIds); + for (String opaqueId : opaqueIds) { + assertThat(msg, opaqueId, is(String.valueOf(i++))); + } + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4RestIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4RestIT.java new file mode 100644 index 00000000000..6566428efa3 --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4RestIT.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.http.netty4; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.RestTestCandidate; +import org.elasticsearch.test.rest.parser.RestTestParseException; + +import java.io.IOException; + +public class Netty4RestIT extends ESRestTestCase { + + public Netty4RestIT(@Name("yaml") RestTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws IOException, RestTestParseException { + return ESRestTestCase.createParameters(0, 1); + } + +} \ No newline at end of file diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java new file mode 100644 index 00000000000..1feb92223a3 --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java @@ -0,0 +1,262 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.http.netty4.pipelining; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.buffer.Unpooled; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelPromise; +import io.netty.channel.SimpleChannelInboundHandler; +import io.netty.channel.embedded.EmbeddedChannel; +import io.netty.handler.codec.http.DefaultFullHttpRequest; +import io.netty.handler.codec.http.DefaultFullHttpResponse; +import io.netty.handler.codec.http.DefaultHttpRequest; +import io.netty.handler.codec.http.FullHttpRequest; +import io.netty.handler.codec.http.FullHttpResponse; +import io.netty.handler.codec.http.HttpMethod; +import io.netty.handler.codec.http.HttpRequest; +import io.netty.handler.codec.http.HttpVersion; +import io.netty.handler.codec.http.LastHttpContent; +import io.netty.handler.codec.http.QueryStringDecoder; +import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.LinkedTransferQueue; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LENGTH; +import static io.netty.handler.codec.http.HttpResponseStatus.OK; +import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1; +import static org.hamcrest.core.Is.is; + +public class Netty4HttpPipeliningHandlerTests extends ESTestCase { + + private 
ExecutorService executorService = Executors.newFixedThreadPool(randomIntBetween(4, 8)); + private Map waitingRequests = new ConcurrentHashMap<>(); + private Map finishingRequests = new ConcurrentHashMap<>(); + + @After + public void tearDown() throws Exception { + waitingRequests.keySet().forEach(this::finishRequest); + shutdownExecutorService(); + super.tearDown(); + } + + private CountDownLatch finishRequest(String url) { + waitingRequests.get(url).countDown(); + return finishingRequests.get(url); + } + + private void shutdownExecutorService() throws InterruptedException { + if (!executorService.isShutdown()) { + executorService.shutdown(); + executorService.awaitTermination(10, TimeUnit.SECONDS); + } + } + + public void testThatPipeliningWorksWithFastSerializedRequests() throws InterruptedException { + final int numberOfRequests = randomIntBetween(2, 128); + final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new HttpPipeliningHandler(numberOfRequests), new WorkEmulatorHandler()); + + for (int i = 0; i < numberOfRequests; i++) { + embeddedChannel.writeInbound(createHttpRequest("/" + String.valueOf(i))); + } + + final List latches = new ArrayList<>(); + for (final String url : waitingRequests.keySet()) { + latches.add(finishRequest(url)); + } + + for (final CountDownLatch latch : latches) { + latch.await(); + } + + embeddedChannel.flush(); + + for (int i = 0; i < numberOfRequests; i++) { + assertReadHttpMessageHasContent(embeddedChannel, String.valueOf(i)); + } + + assertTrue(embeddedChannel.isOpen()); + } + + public void testThatPipeliningWorksWhenSlowRequestsInDifferentOrder() throws InterruptedException { + final int numberOfRequests = randomIntBetween(2, 128); + final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new HttpPipeliningHandler(numberOfRequests), new WorkEmulatorHandler()); + + for (int i = 0; i < numberOfRequests; i++) { + embeddedChannel.writeInbound(createHttpRequest("/" + String.valueOf(i))); + } + + // random order execution + 
final List urls = new ArrayList<>(waitingRequests.keySet()); + Randomness.shuffle(urls); + final List latches = new ArrayList<>(); + for (final String url : urls) { + latches.add(finishRequest(url)); + } + + for (final CountDownLatch latch : latches) { + latch.await(); + } + + embeddedChannel.flush(); + + for (int i = 0; i < numberOfRequests; i++) { + assertReadHttpMessageHasContent(embeddedChannel, String.valueOf(i)); + } + + assertTrue(embeddedChannel.isOpen()); + } + + public void testThatPipeliningWorksWithChunkedRequests() throws InterruptedException { + final int numberOfRequests = randomIntBetween(2, 128); + final EmbeddedChannel embeddedChannel = + new EmbeddedChannel( + new AggregateUrisAndHeadersHandler(), + new HttpPipeliningHandler(numberOfRequests), + new WorkEmulatorHandler()); + + for (int i = 0; i < numberOfRequests; i++) { + final DefaultHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/" + i); + embeddedChannel.writeInbound(request); + embeddedChannel.writeInbound(LastHttpContent.EMPTY_LAST_CONTENT); + } + + final List latches = new ArrayList<>(); + for (int i = numberOfRequests - 1; i >= 0; i--) { + latches.add(finishRequest(Integer.toString(i))); + } + + for (final CountDownLatch latch : latches) { + latch.await(); + } + + embeddedChannel.flush(); + + for (int i = 0; i < numberOfRequests; i++) { + assertReadHttpMessageHasContent(embeddedChannel, Integer.toString(i)); + } + + assertTrue(embeddedChannel.isOpen()); + } + + public void testThatPipeliningClosesConnectionWithTooManyEvents() throws InterruptedException { + final int numberOfRequests = randomIntBetween(2, 128); + final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new HttpPipeliningHandler(numberOfRequests), new WorkEmulatorHandler()); + + for (int i = 0; i < 1 + numberOfRequests + 1; i++) { + embeddedChannel.writeInbound(createHttpRequest("/" + Integer.toString(i))); + } + + final List latches = new ArrayList<>(); + final List requests = 
IntStream.range(1, numberOfRequests + 1).mapToObj(r -> r).collect(Collectors.toList()); + Randomness.shuffle(requests); + + for (final Integer request : requests) { + latches.add(finishRequest(request.toString())); + } + + for (final CountDownLatch latch : latches) { + latch.await(); + } + + finishRequest(Integer.toString(numberOfRequests + 1)).await(); + + embeddedChannel.flush(); + + assertFalse(embeddedChannel.isOpen()); + } + + + private void assertReadHttpMessageHasContent(EmbeddedChannel embeddedChannel, String expectedContent) { + FullHttpResponse response = (FullHttpResponse) embeddedChannel.outboundMessages().poll(); + assertNotNull("Expected response to exist, maybe you did not wait long enough?", response); + assertNotNull("Expected response to have content " + expectedContent, response.content()); + String data = new String(ByteBufUtil.getBytes(response.content()), StandardCharsets.UTF_8); + assertThat(data, is(expectedContent)); + } + + private FullHttpRequest createHttpRequest(String uri) { + return new DefaultFullHttpRequest(HTTP_1_1, HttpMethod.GET, uri); + } + + private static class AggregateUrisAndHeadersHandler extends SimpleChannelInboundHandler { + + static final Queue QUEUE_URI = new LinkedTransferQueue<>(); + + @Override + protected void channelRead0(ChannelHandlerContext ctx, HttpRequest request) throws Exception { + QUEUE_URI.add(request.uri()); + } + + } + + private class WorkEmulatorHandler extends SimpleChannelInboundHandler { + + @Override + protected void channelRead0(final ChannelHandlerContext ctx, final HttpPipelinedRequest pipelinedRequest) throws Exception { + final QueryStringDecoder decoder; + if (pipelinedRequest.last() instanceof FullHttpRequest) { + final FullHttpRequest fullHttpRequest = (FullHttpRequest) pipelinedRequest.last(); + decoder = new QueryStringDecoder(fullHttpRequest.uri()); + } else { + decoder = new QueryStringDecoder(AggregateUrisAndHeadersHandler.QUEUE_URI.poll()); + } + + final String uri = 
decoder.path().replace("/", ""); + final ByteBuf content = Unpooled.copiedBuffer(uri, StandardCharsets.UTF_8); + final DefaultFullHttpResponse httpResponse = new DefaultFullHttpResponse(HTTP_1_1, OK, content); + httpResponse.headers().add(CONTENT_LENGTH, content.readableBytes()); + + final CountDownLatch waitingLatch = new CountDownLatch(1); + waitingRequests.put(uri, waitingLatch); + final CountDownLatch finishingLatch = new CountDownLatch(1); + finishingRequests.put(uri, finishingLatch); + + executorService.submit(() -> { + try { + waitingLatch.await(1000, TimeUnit.SECONDS); + final ChannelPromise promise = ctx.newPromise(); + ctx.write(pipelinedRequest.createHttpResponse(httpResponse, promise), promise); + finishingLatch.countDown(); + } catch (InterruptedException e) { + fail(e.toString()); + } + }); + } + + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/ByteBufBytesReferenceTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/ByteBufBytesReferenceTests.java new file mode 100644 index 00000000000..bce875e8516 --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/ByteBufBytesReferenceTests.java @@ -0,0 +1,79 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.transport.netty4; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.AbstractBytesReferenceTestCase; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; + +import java.io.IOException; + +public class ByteBufBytesReferenceTests extends AbstractBytesReferenceTestCase { + @Override + protected BytesReference newBytesReference(int length) throws IOException { + ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(length, bigarrays); + for (int i = 0; i < length; i++) { + out.writeByte((byte) random().nextInt(1 << 8)); + } + assertEquals(out.size(), length); + BytesReference ref = out.bytes(); + assertEquals(ref.length(), length); + BytesRef bytesRef = ref.toBytesRef(); + final ByteBuf buffer = Unpooled.wrappedBuffer(bytesRef.bytes, bytesRef.offset, bytesRef.length); + return Netty4Utils.toBytesReference(buffer); + } + + public void testSliceOnAdvancedBuffer() throws IOException { + BytesReference bytesReference = newBytesReference(randomIntBetween(10, 3 * PAGE_SIZE)); + BytesRef bytesRef = bytesReference.toBytesRef(); + ByteBuf channelBuffer = Unpooled.wrappedBuffer(bytesRef.bytes, bytesRef.offset, + bytesRef.length); + int numBytesToRead = randomIntBetween(1, 5); + for (int i = 0; i < numBytesToRead; i++) { + channelBuffer.readByte(); + } + BytesReference other = Netty4Utils.toBytesReference(channelBuffer); + BytesReference slice = bytesReference.slice(numBytesToRead, bytesReference.length() - numBytesToRead); + assertEquals(other, slice); + assertEquals(other.slice(3, 1), slice.slice(3, 1)); + } + + public void testImmutable() throws IOException { + BytesReference bytesReference = newBytesReference(randomIntBetween(10, 3 * PAGE_SIZE)); + BytesRef 
bytesRef = BytesRef.deepCopyOf(bytesReference.toBytesRef()); + ByteBuf channelBuffer = Unpooled.wrappedBuffer(bytesRef.bytes, bytesRef.offset, + bytesRef.length); + ByteBufBytesReference byteBufBytesReference = new ByteBufBytesReference(channelBuffer, bytesRef.length); + assertEquals(byteBufBytesReference, bytesReference); + channelBuffer.readInt(); // this advances the index of the channel buffer + assertEquals(byteBufBytesReference, bytesReference); + assertEquals(bytesRef, byteBufBytesReference.toBytesRef()); + + BytesRef unicodeBytes = new BytesRef(randomUnicodeOfCodepointLength(100)); + channelBuffer = Unpooled.wrappedBuffer(unicodeBytes.bytes, unicodeBytes.offset, unicodeBytes.length); + byteBufBytesReference = new ByteBufBytesReference(channelBuffer, unicodeBytes.length); + String utf8ToString = byteBufBytesReference.utf8ToString(); + channelBuffer.readInt(); // this advances the index of the channel buffer + assertEquals(utf8ToString, byteBufBytesReference.utf8ToString()); + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java new file mode 100644 index 00000000000..0e2ee23f950 --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java @@ -0,0 +1,148 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.transport.netty4; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.lease.Releasables; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseHandler; +import org.elasticsearch.transport.TransportResponseOptions; +import org.elasticsearch.transport.TransportSettings; + +import java.io.IOException; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; + +public class 
Netty4ScheduledPingTests extends ESTestCase { + public void testScheduledPing() throws Exception { + ThreadPool threadPool = new TestThreadPool(getClass().getName()); + + Settings settings = Settings.builder() + .put(TcpTransport.PING_SCHEDULE.getKey(), "5ms") + .put(TransportSettings.PORT.getKey(), 0) + .put("cluster.name", "test") + .build(); + + CircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService(); + + NamedWriteableRegistry registryA = new NamedWriteableRegistry(); + final Netty4Transport nettyA = new Netty4Transport(settings, threadPool, new NetworkService(settings), + BigArrays.NON_RECYCLING_INSTANCE, registryA, circuitBreakerService); + MockTransportService serviceA = new MockTransportService(settings, nettyA, threadPool); + serviceA.start(); + serviceA.acceptIncomingRequests(); + + NamedWriteableRegistry registryB = new NamedWriteableRegistry(); + final Netty4Transport nettyB = new Netty4Transport(settings, threadPool, new NetworkService(settings), + BigArrays.NON_RECYCLING_INSTANCE, registryB, circuitBreakerService); + MockTransportService serviceB = new MockTransportService(settings, nettyB, threadPool); + + serviceB.start(); + serviceB.acceptIncomingRequests(); + + DiscoveryNode nodeA = + new DiscoveryNode("TS_A", "TS_A", serviceA.boundAddress().publishAddress(), emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode nodeB = + new DiscoveryNode("TS_B", "TS_B", serviceB.boundAddress().publishAddress(), emptyMap(), emptySet(), Version.CURRENT); + + serviceA.connectToNode(nodeB); + serviceB.connectToNode(nodeA); + + assertBusy(new Runnable() { + @Override + public void run() { + assertThat(nettyA.getPing().getSuccessfulPings(), greaterThan(100L)); + assertThat(nettyB.getPing().getSuccessfulPings(), greaterThan(100L)); + } + }); + assertThat(nettyA.getPing().getFailedPings(), equalTo(0L)); + assertThat(nettyB.getPing().getFailedPings(), equalTo(0L)); + + serviceA.registerRequestHandler("sayHello", TransportRequest.Empty::new, 
ThreadPool.Names.GENERIC, + new TransportRequestHandler() { + @Override + public void messageReceived(TransportRequest.Empty request, TransportChannel channel) { + try { + channel.sendResponse(TransportResponse.Empty.INSTANCE, TransportResponseOptions.EMPTY); + } catch (IOException e) { + logger.error("Unexpected failure", e); + fail(e.getMessage()); + } + } + }); + + int rounds = scaledRandomIntBetween(100, 5000); + for (int i = 0; i < rounds; i++) { + serviceB.submitRequest(nodeA, "sayHello", + TransportRequest.Empty.INSTANCE, TransportRequestOptions.builder().withCompress(randomBoolean()).build(), + new TransportResponseHandler() { + @Override + public TransportResponse.Empty newInstance() { + return TransportResponse.Empty.INSTANCE; + } + + @Override + public String executor() { + return ThreadPool.Names.GENERIC; + } + + @Override + public void handleResponse(TransportResponse.Empty response) { + } + + @Override + public void handleException(TransportException exp) { + logger.error("Unexpected failure", exp); + fail("got exception instead of a response: " + exp.getMessage()); + } + }).txGet(); + } + + assertBusy(() -> { + assertThat(nettyA.getPing().getSuccessfulPings(), greaterThan(200L)); + assertThat(nettyB.getPing().getSuccessfulPings(), greaterThan(200L)); + }); + assertThat(nettyA.getPing().getFailedPings(), equalTo(0L)); + assertThat(nettyB.getPing().getFailedPings(), equalTo(0L)); + + Releasables.close(serviceA, serviceB); + terminate(threadPool); + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java new file mode 100644 index 00000000000..8e7ac400f46 --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java @@ -0,0 +1,107 @@ +/* + * Licensed to Elasticsearch under one or more 
contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.transport.netty4; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportSettings; +import org.junit.After; +import org.junit.Before; + +import java.io.BufferedReader; +import java.io.InputStreamReader; +import java.net.InetAddress; +import java.net.Socket; +import java.nio.charset.StandardCharsets; + +import static org.hamcrest.Matchers.is; + +/** + * This test checks, if a HTTP look-alike request (starting with a HTTP method and a space) + * actually returns text response instead of just dropping the connection + */ +public class Netty4SizeHeaderFrameDecoderTests extends ESTestCase { + + private final Settings settings = Settings.builder() + 
.put("node.name", "NettySizeHeaderFrameDecoderTests") + .put(TransportSettings.BIND_HOST.getKey(), "127.0.0.1") + .put(TransportSettings.PORT.getKey(), "0") + .build(); + + private ThreadPool threadPool; + private Netty4Transport nettyTransport; + private int port; + private InetAddress host; + + @Before + public void startThreadPool() { + threadPool = new ThreadPool(settings); + NetworkService networkService = new NetworkService(settings); + BigArrays bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()); + nettyTransport = new Netty4Transport(settings, threadPool, networkService, bigArrays, new NamedWriteableRegistry(), + new NoneCircuitBreakerService()); + nettyTransport.start(); + + TransportAddress[] boundAddresses = nettyTransport.boundAddress().boundAddresses(); + InetSocketTransportAddress transportAddress = (InetSocketTransportAddress) randomFrom(boundAddresses); + port = transportAddress.address().getPort(); + host = transportAddress.address().getAddress(); + } + + @After + public void terminateThreadPool() throws InterruptedException { + nettyTransport.stop(); + terminate(threadPool); + threadPool = null; + } + + public void testThatTextMessageIsReturnedOnHTTPLikeRequest() throws Exception { + String randomMethod = randomFrom("GET", "POST", "PUT", "DELETE", "HEAD", "OPTIONS", "PATCH"); + String data = randomMethod + " / HTTP/1.1"; + + try (Socket socket = new Socket(host, port)) { + socket.getOutputStream().write(data.getBytes(StandardCharsets.UTF_8)); + socket.getOutputStream().flush(); + + try (BufferedReader reader = new BufferedReader(new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))) { + assertThat(reader.readLine(), is("This is not a HTTP port")); + } + } + } + + public void testThatNothingIsReturnedForOtherInvalidPackets() throws Exception { + try (Socket socket = new Socket(host, port)) { + socket.getOutputStream().write("FOOBAR".getBytes(StandardCharsets.UTF_8)); + socket.getOutputStream().flush(); 
+ + // end of stream + assertThat(socket.getInputStream().read(), is(-1)); + } + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4TransportIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4TransportIT.java new file mode 100644 index 00000000000..b913fad933f --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4TransportIT.java @@ -0,0 +1,124 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.transport.netty4; + +import io.netty.channel.Channel; +import org.elasticsearch.ESNetty4IntegTestCase; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase.ClusterScope; +import org.elasticsearch.test.ESIntegTestCase.Scope; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportSettings; + +import java.io.IOException; +import java.net.InetSocketAddress; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +@ClusterScope(scope = Scope.TEST, supportsDedicatedMasters = false, numDataNodes = 1) +public class Netty4TransportIT extends ESNetty4IntegTestCase { + // static so we can use it in anonymous classes + private static String channelProfileName = null; + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal)) + .put(NetworkModule.TRANSPORT_TYPE_KEY, "exception-throwing").build(); + } + + @Override + protected Collection> nodePlugins() { + List> list = new ArrayList<>(); + 
list.add(ExceptionThrowingNetty4Transport.TestPlugin.class); + list.addAll(super.nodePlugins()); + return Collections.unmodifiableCollection(list); + } + + public void testThatConnectionFailsAsIntended() throws Exception { + Client transportClient = internalCluster().transportClient(); + ClusterHealthResponse clusterIndexHealths = transportClient.admin().cluster().prepareHealth().get(); + assertThat(clusterIndexHealths.getStatus(), is(ClusterHealthStatus.GREEN)); + try { + transportClient.filterWithHeader(Collections.singletonMap("ERROR", "MY MESSAGE")).admin().cluster().prepareHealth().get(); + fail("Expected exception, but didn't happen"); + } catch (ElasticsearchException e) { + assertThat(e.getMessage(), containsString("MY MESSAGE")); + assertThat(channelProfileName, is(TransportSettings.DEFAULT_PROFILE)); + } + } + + public static final class ExceptionThrowingNetty4Transport extends Netty4Transport { + + public static class TestPlugin extends Plugin { + public void onModule(NetworkModule module) { + module.registerTransport("exception-throwing", ExceptionThrowingNetty4Transport.class); + } + } + + @Inject + public ExceptionThrowingNetty4Transport( + Settings settings, + ThreadPool threadPool, + NetworkService networkService, + BigArrays bigArrays, + NamedWriteableRegistry namedWriteableRegistry, + CircuitBreakerService circuitBreakerService) { + super(settings, threadPool, networkService, bigArrays, namedWriteableRegistry, circuitBreakerService); + } + + protected String handleRequest(Channel channel, String profileName, + StreamInput stream, long requestId, int messageLengthBytes, Version version, + InetSocketAddress remoteAddress) throws IOException { + String action = super.handleRequest(channel, profileName, stream, requestId, messageLengthBytes, version, + remoteAddress); + channelProfileName = TransportSettings.DEFAULT_PROFILE; + return action; + } + + @Override + protected void validateRequest(StreamInput buffer, long requestId, String action) + throws 
IOException { + super.validateRequest(buffer, requestId, action); + String error = threadPool.getThreadContext().getHeader("ERROR"); + if (error != null) { + throw new ElasticsearchException(error); + } + } + + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4TransportMultiPortIntegrationIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4TransportMultiPortIntegrationIT.java new file mode 100644 index 00000000000..32268d1f5d8 --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4TransportMultiPortIntegrationIT.java @@ -0,0 +1,111 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.transport.netty4; + +import org.elasticsearch.ESNetty4IntegTestCase; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.client.transport.TransportClient; +import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.BoundTransportAddress; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.env.Environment; +import org.elasticsearch.test.ESIntegTestCase.ClusterScope; +import org.elasticsearch.test.ESIntegTestCase.Scope; +import org.elasticsearch.test.junit.annotations.Network; +import org.elasticsearch.transport.MockTransportClient; +import org.elasticsearch.transport.Netty4Plugin; + +import java.net.InetAddress; +import java.util.Locale; + +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThanOrEqualTo; + +@ClusterScope(scope = Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 1, numClientNodes = 0) +public class Netty4TransportMultiPortIntegrationIT extends ESNetty4IntegTestCase { + + private static int randomPort = -1; + private static String randomPortRange; + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + if (randomPort == -1) { + randomPort = randomIntBetween(49152, 65525); + randomPortRange = String.format(Locale.ROOT, "%s-%s", 
randomPort, randomPort+10); + } + Settings.Builder builder = Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + .put("network.host", "127.0.0.1") + .put("transport.profiles.client1.port", randomPortRange) + .put("transport.profiles.client1.publish_host", "127.0.0.7") + .put("transport.profiles.client1.publish_port", "4321") + .put("transport.profiles.client1.reuse_address", true); + return builder.build(); + } + + public void testThatTransportClientCanConnect() throws Exception { + Settings settings = Settings.builder() + .put("cluster.name", internalCluster().getClusterName()) + .put(NetworkModule.TRANSPORT_TYPE_KEY, Netty4Plugin.NETTY_TRANSPORT_NAME) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + try (TransportClient transportClient = new MockTransportClient(settings, Netty4Plugin.class)) { + transportClient.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("127.0.0.1"), randomPort)); + ClusterHealthResponse response = transportClient.admin().cluster().prepareHealth().get(); + assertThat(response.getStatus(), is(ClusterHealthStatus.GREEN)); + } + } + + @Network + public void testThatInfosAreExposed() throws Exception { + NodesInfoResponse response = client().admin().cluster().prepareNodesInfo().clear().setTransport(true).get(); + for (NodeInfo nodeInfo : response.getNodes()) { + assertThat(nodeInfo.getTransport().getProfileAddresses().keySet(), hasSize(1)); + assertThat(nodeInfo.getTransport().getProfileAddresses(), hasKey("client1")); + BoundTransportAddress boundTransportAddress = nodeInfo.getTransport().getProfileAddresses().get("client1"); + for (TransportAddress transportAddress : boundTransportAddress.boundAddresses()) { + assertThat(transportAddress, instanceOf(InetSocketTransportAddress.class)); + } + + // bound addresses + for (TransportAddress transportAddress : boundTransportAddress.boundAddresses()) { + assertThat(transportAddress, instanceOf(InetSocketTransportAddress.class)); 
+ assertThat(((InetSocketTransportAddress) transportAddress).address().getPort(), + is(allOf(greaterThanOrEqualTo(randomPort), lessThanOrEqualTo(randomPort + 10)))); + } + + // publish address + assertThat(boundTransportAddress.publishAddress(), instanceOf(InetSocketTransportAddress.class)); + InetSocketTransportAddress publishAddress = (InetSocketTransportAddress) boundTransportAddress.publishAddress(); + assertThat(NetworkAddress.format(publishAddress.address().getAddress()), is("127.0.0.7")); + assertThat(publishAddress.address().getPort(), is(4321)); + } + } +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4TransportPublishAddressIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4TransportPublishAddressIT.java new file mode 100644 index 00000000000..0b8e4ed85b5 --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4TransportPublishAddressIT.java @@ -0,0 +1,91 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.transport.netty4; + +import org.elasticsearch.ESNetty4IntegTestCase; +import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.network.NetworkUtils; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.BoundTransportAddress; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.node.Node; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.transport.Netty4Plugin; + +import java.net.Inet4Address; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.instanceOf; + +/** + * Checks that Elasticsearch produces a sane publish_address when it binds to + * different ports on ipv4 and ipv6. 
+ */ +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) +public class Netty4TransportPublishAddressIT extends ESNetty4IntegTestCase { + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + .put(NetworkModule.TRANSPORT_TYPE_KEY, Netty4Plugin.NETTY_TRANSPORT_NAME) + .build(); + } + + public void testDifferentPorts() throws Exception { + if (!NetworkUtils.SUPPORTS_V6) { + return; + } + logger.info("--> starting a node on ipv4 only"); + Settings ipv4Settings = Settings.builder().put("network.host", "127.0.0.1").build(); + String ipv4OnlyNode = internalCluster().startNode(ipv4Settings); // should bind 127.0.0.1:XYZ + + logger.info("--> starting a node on ipv4 and ipv6"); + Settings bothSettings = Settings.builder().put("network.host", "_local_").build(); + internalCluster().startNode(bothSettings); // should bind [::1]:XYZ and 127.0.0.1:XYZ+1 + + logger.info("--> waiting for the cluster to declare itself stable"); + ensureStableCluster(2); // fails if port of publish address does not match corresponding bound address + + logger.info("--> checking if boundAddress matching publishAddress has same port"); + NodesInfoResponse nodesInfoResponse = client().admin().cluster().prepareNodesInfo().get(); + for (NodeInfo nodeInfo : nodesInfoResponse.getNodes()) { + BoundTransportAddress boundTransportAddress = nodeInfo.getTransport().getAddress(); + if (nodeInfo.getNode().getName().equals(ipv4OnlyNode)) { + assertThat(boundTransportAddress.boundAddresses().length, equalTo(1)); + assertThat(boundTransportAddress.boundAddresses()[0].getPort(), equalTo(boundTransportAddress.publishAddress().getPort())); + } else { + assertThat(boundTransportAddress.boundAddresses().length, greaterThan(1)); + for (TransportAddress boundAddress : boundTransportAddress.boundAddresses()) { + assertThat(boundAddress, instanceOf(InetSocketTransportAddress.class)); + InetSocketTransportAddress 
inetBoundAddress = (InetSocketTransportAddress) boundAddress; + if (inetBoundAddress.address().getAddress() instanceof Inet4Address) { + // IPv4 address is preferred publish address for _local_ + assertThat(inetBoundAddress.getPort(), equalTo(boundTransportAddress.publishAddress().getPort())); + } + } + } + } + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4UtilsTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4UtilsTests.java new file mode 100644 index 00000000000..8372a8540b8 --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4UtilsTests.java @@ -0,0 +1,98 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.transport.netty4; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.CompositeByteBuf; +import io.netty.buffer.Unpooled; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.AbstractBytesReferenceTestCase; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +public class Netty4UtilsTests extends ESTestCase { + + private static final int PAGE_SIZE = BigArrays.BYTE_PAGE_SIZE; + private final BigArrays bigarrays = new BigArrays(null, new NoneCircuitBreakerService(), false); + + public void testToChannelBufferWithEmptyRef() throws IOException { + ByteBuf buffer = Netty4Utils.toByteBuf(getRandomizedBytesReference(0)); + assertSame(Unpooled.EMPTY_BUFFER, buffer); + } + + public void testToChannelBufferWithSlice() throws IOException { + BytesReference ref = getRandomizedBytesReference(randomIntBetween(1, 3 * PAGE_SIZE)); + int sliceOffset = randomIntBetween(0, ref.length()); + int sliceLength = randomIntBetween(ref.length() - sliceOffset, ref.length() - sliceOffset); + BytesReference slice = ref.slice(sliceOffset, sliceLength); + ByteBuf buffer = Netty4Utils.toByteBuf(slice); + BytesReference bytesReference = Netty4Utils.toBytesReference(buffer); + assertArrayEquals(BytesReference.toBytes(slice), BytesReference.toBytes(bytesReference)); + } + + public void testToChannelBufferWithSliceAfter() throws IOException { + BytesReference ref = getRandomizedBytesReference(randomIntBetween(1, 3 * PAGE_SIZE)); + int sliceOffset = randomIntBetween(0, ref.length()); + int sliceLength = randomIntBetween(ref.length() - sliceOffset, ref.length() - sliceOffset); + ByteBuf buffer = Netty4Utils.toByteBuf(ref); 
+ BytesReference bytesReference = Netty4Utils.toBytesReference(buffer); + assertArrayEquals(BytesReference.toBytes(ref.slice(sliceOffset, sliceLength)), + BytesReference.toBytes(bytesReference.slice(sliceOffset, sliceLength))); + } + + public void testToChannelBuffer() throws IOException { + BytesReference ref = getRandomizedBytesReference(randomIntBetween(1, 3 * PAGE_SIZE)); + ByteBuf buffer = Netty4Utils.toByteBuf(ref); + BytesReference bytesReference = Netty4Utils.toBytesReference(buffer); + if (ref instanceof ByteBufBytesReference) { + assertEquals(buffer, ((ByteBufBytesReference) ref).toByteBuf()); + } else if (AbstractBytesReferenceTestCase.getNumPages(ref) > 1) { // we gather the buffers into a channel buffer + assertTrue(buffer instanceof CompositeByteBuf); + } + assertArrayEquals(BytesReference.toBytes(ref), BytesReference.toBytes(bytesReference)); + } + + private BytesReference getRandomizedBytesReference(int length) throws IOException { + // we know bytes stream output always creates a paged bytes reference, we use it to create randomized content + ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(length, bigarrays); + for (int i = 0; i < length; i++) { + out.writeByte((byte) random().nextInt(1 << 8)); + } + assertEquals(out.size(), length); + BytesReference ref = out.bytes(); + assertEquals(ref.length(), length); + if (randomBoolean()) { + return new BytesArray(ref.toBytesRef()); + } else if (randomBoolean()) { + BytesRef bytesRef = ref.toBytesRef(); + return Netty4Utils.toBytesReference(Unpooled.wrappedBuffer(bytesRef.bytes, bytesRef.offset, + bytesRef.length)); + } else { + return ref; + } + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyTransportMultiPortTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyTransportMultiPortTests.java new file mode 100644 index 00000000000..e76431daf8d --- /dev/null +++ 
b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyTransportMultiPortTests.java @@ -0,0 +1,146 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.transport.netty4; + +import org.elasticsearch.common.component.Lifecycle; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.network.NetworkUtils; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.TransportSettings; +import org.junit.Before; + +import static org.hamcrest.Matchers.is; + +public class NettyTransportMultiPortTests extends ESTestCase { + + private String host; + + @Before + public void setup() { + if (NetworkUtils.SUPPORTS_V6 && randomBoolean()) { + host = 
"::1"; + } else { + host = "127.0.0.1"; + } + } + + public void testThatNettyCanBindToMultiplePorts() throws Exception { + Settings settings = Settings.builder() + .put("network.host", host) + .put(TransportSettings.PORT.getKey(), 22) // will not actually bind to this + .put("transport.profiles.default.port", 0) + .put("transport.profiles.client1.port", 0) + .build(); + + ThreadPool threadPool = new TestThreadPool("tst"); + try (TcpTransport transport = startTransport(settings, threadPool)) { + assertEquals(1, transport.profileBoundAddresses().size()); + assertEquals(1, transport.boundAddress().boundAddresses().length); + } finally { + terminate(threadPool); + } + } + + public void testThatDefaultProfileInheritsFromStandardSettings() throws Exception { + Settings settings = Settings.builder() + .put("network.host", host) + .put(TransportSettings.PORT.getKey(), 0) + .put("transport.profiles.client1.port", 0) + .build(); + + ThreadPool threadPool = new TestThreadPool("tst"); + try (TcpTransport transport = startTransport(settings, threadPool)) { + assertEquals(1, transport.profileBoundAddresses().size()); + assertEquals(1, transport.boundAddress().boundAddresses().length); + } finally { + terminate(threadPool); + } + } + + public void testThatProfileWithoutPortSettingsFails() throws Exception { + + Settings settings = Settings.builder() + .put("network.host", host) + .put(TransportSettings.PORT.getKey(), 0) + .put("transport.profiles.client1.whatever", "foo") + .build(); + + ThreadPool threadPool = new TestThreadPool("tst"); + try (TcpTransport transport = startTransport(settings, threadPool)) { + assertEquals(0, transport.profileBoundAddresses().size()); + assertEquals(1, transport.boundAddress().boundAddresses().length); + } finally { + terminate(threadPool); + } + } + + public void testThatDefaultProfilePortOverridesGeneralConfiguration() throws Exception { + Settings settings = Settings.builder() + .put("network.host", host) + 
.put(TransportSettings.PORT.getKey(), 22) // will not actually bind to this + .put("transport.profiles.default.port", 0) + .build(); + + ThreadPool threadPool = new TestThreadPool("tst"); + try (TcpTransport transport = startTransport(settings, threadPool)) { + assertEquals(0, transport.profileBoundAddresses().size()); + assertEquals(1, transport.boundAddress().boundAddresses().length); + } finally { + terminate(threadPool); + } + } + + public void testThatProfileWithoutValidNameIsIgnored() throws Exception { + Settings settings = Settings.builder() + .put("network.host", host) + .put(TransportSettings.PORT.getKey(), 0) + // mimics someone trying to define a profile for .local which is the profile for a node request to itself + .put("transport.profiles." + TransportService.DIRECT_RESPONSE_PROFILE + ".port", 22) // will not actually bind to this + .put("transport.profiles..port", 23) // will not actually bind to this + .build(); + + ThreadPool threadPool = new TestThreadPool("tst"); + try (TcpTransport transport = startTransport(settings, threadPool)) { + assertEquals(0, transport.profileBoundAddresses().size()); + assertEquals(1, transport.boundAddress().boundAddresses().length); + } finally { + terminate(threadPool); + } + } + + private TcpTransport startTransport(Settings settings, ThreadPool threadPool) { + BigArrays bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()); + TcpTransport transport = new Netty4Transport(settings, threadPool, new NetworkService(settings), bigArrays, + new NamedWriteableRegistry(), new NoneCircuitBreakerService()); + transport.start(); + + assertThat(transport.lifecycleState(), is(Lifecycle.State.STARTED)); + return transport; + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java new file mode 100644 index 00000000000..bafec9d2ffa 
--- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java @@ -0,0 +1,79 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.transport.netty4; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.AbstractSimpleTransportTestCase; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportSettings; + +import java.net.InetAddress; +import java.net.UnknownHostException; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; +import static 
org.hamcrest.Matchers.containsString; + +public class SimpleNetty4TransportTests extends AbstractSimpleTransportTestCase { + + public static MockTransportService nettyFromThreadPool( + Settings settings, + ThreadPool threadPool, final Version version) { + NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); + Transport transport = new Netty4Transport(settings, threadPool, new NetworkService(settings), BigArrays.NON_RECYCLING_INSTANCE, + namedWriteableRegistry, new NoneCircuitBreakerService()) { + @Override + protected Version getCurrentVersion() { + return version; + } + }; + return new MockTransportService(Settings.EMPTY, transport, threadPool); + } + + @Override + protected MockTransportService build(Settings settings, Version version) { + settings = Settings.builder().put(settings).put(TransportSettings.PORT.getKey(), "0").build(); + MockTransportService transportService = nettyFromThreadPool(settings, threadPool, version); + transportService.start(); + return transportService; + } + + public void testConnectException() throws UnknownHostException { + try { + serviceA.connectToNode(new DiscoveryNode("C", new InetSocketTransportAddress(InetAddress.getByName("localhost"), 9876), + emptyMap(), emptySet(),Version.CURRENT)); + fail("Expected ConnectTransportException"); + } catch (ConnectTransportException e) { + assertThat(e.getMessage(), containsString("connect_timeout")); + assertThat(e.getMessage(), containsString("[127.0.0.1:9876]")); + } + } + +} diff --git a/modules/transport-netty4/src/test/resources/rest-api-spec/test/10_basic.yaml b/modules/transport-netty4/src/test/resources/rest-api-spec/test/10_basic.yaml new file mode 100644 index 00000000000..a7beac1491f --- /dev/null +++ b/modules/transport-netty4/src/test/resources/rest-api-spec/test/10_basic.yaml @@ -0,0 +1,13 @@ +# Integration tests for Netty transport +# +"Netty loaded": + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + - do: + 
nodes.info: {} + + - match: { nodes.$master.modules.1.name: transport-netty4 } diff --git a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java index 5f54d14eca2..96c1139c790 100644 --- a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java +++ b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.transport.MockTcpTransportPlugin; +import org.elasticsearch.transport.Netty4Plugin; import org.elasticsearch.transport.client.PreBuiltTransportClient; import org.junit.After; import org.junit.AfterClass; @@ -49,6 +50,8 @@ import java.util.Locale; import java.util.concurrent.atomic.AtomicInteger; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiOfLength; +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween; import static org.hamcrest.Matchers.notNullValue; /** @@ -82,13 +85,22 @@ public abstract class ESSmokeClientTestCase extends LuceneTestCase { .put("client.transport.ignore_cluster_name", true) .put(Environment.PATH_HOME_SETTING.getKey(), tempDir); final Collection> plugins; - if (random().nextBoolean()) { - builder.put(NetworkModule.TRANSPORT_TYPE_KEY, MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME); - plugins = Collections.singleton(MockTcpTransportPlugin.class); - } else { - plugins = Collections.emptyList(); + switch (randomIntBetween(0, 2)) { + case 0: + builder.put(NetworkModule.TRANSPORT_TYPE_KEY, MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME); + plugins = Collections.singleton(MockTcpTransportPlugin.class); + break; + case 1: + plugins = 
Collections.emptyList(); + builder.put(NetworkModule.TRANSPORT_TYPE_KEY, Netty4Plugin.NETTY_TRANSPORT_NAME); + break; + case 2: + plugins = Collections.emptyList(); + break; + default: + throw new AssertionError(); } - TransportClient client = new PreBuiltTransportClient(builder.build(), plugins).addTransportAddresses(transportAddresses); + TransportClient client = new PreBuiltTransportClient(builder.build(), plugins).addTransportAddresses(transportAddresses); logger.info("--> Elasticsearch Java TransportClient started"); diff --git a/qa/smoke-test-http/build.gradle b/qa/smoke-test-http/build.gradle index 7fa3205537a..038115c2d24 100644 --- a/qa/smoke-test-http/build.gradle +++ b/qa/smoke-test-http/build.gradle @@ -21,4 +21,5 @@ apply plugin: 'elasticsearch.rest-test' dependencies { testCompile project(path: ':modules:transport-netty3', configuration: 'runtime') // for http + testCompile project(path: ':modules:transport-netty4', configuration: 'runtime') // for http } \ No newline at end of file diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java index e22b2e187ea..6e548dd2ca6 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java @@ -65,4 +65,5 @@ public class HttpCompressionIT extends ESIntegTestCase { assertNull(response.getHeader(HttpHeaders.CONTENT_ENCODING)); } } + } diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpSmokeTestCase.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpSmokeTestCase.java index 636d652feb1..03a7ba68a71 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpSmokeTestCase.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpSmokeTestCase.java @@ -25,6 +25,8 @@ import org.elasticsearch.plugins.Plugin; import 
org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.transport.MockTcpTransportPlugin; import org.elasticsearch.transport.Netty3Plugin; +import org.elasticsearch.transport.Netty4Plugin; +import org.junit.BeforeClass; import java.util.Collection; import java.util.Collections; @@ -32,33 +34,56 @@ import java.util.List; public abstract class HttpSmokeTestCase extends ESIntegTestCase { + private static String nodeTransportTypeKey; + private static String nodeHttpTypeKey; + private static String clientTypeKey; + + @SuppressWarnings("unchecked") + @BeforeClass + public static void setUpTransport() { + nodeTransportTypeKey = getTypeKey(randomFrom(MockTcpTransportPlugin.class, Netty3Plugin.class, Netty4Plugin.class)); + nodeHttpTypeKey = getTypeKey(randomFrom(Netty3Plugin.class, Netty4Plugin.class)); + clientTypeKey = getTypeKey(randomFrom(MockTcpTransportPlugin.class, Netty3Plugin.class, Netty4Plugin.class)); + } + + private static String getTypeKey(Class clazz) { + if (clazz.equals(MockTcpTransportPlugin.class)) { + return MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME; + } else if (clazz.equals(Netty3Plugin.class)) { + return Netty3Plugin.NETTY_TRANSPORT_NAME; + } else { + assert clazz.equals(Netty4Plugin.class); + return Netty4Plugin.NETTY_TRANSPORT_NAME; + } + } + @Override protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put("netty.assert.buglevel", false) - .put(NetworkModule.TRANSPORT_TYPE_KEY, randomFrom(Netty3Plugin.NETTY_TRANSPORT_NAME, - MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME)) - .put(NetworkModule.HTTP_ENABLED.getKey(), true).build(); + .put(super.nodeSettings(nodeOrdinal)) + .put("netty.assert.buglevel", false) + .put(NetworkModule.TRANSPORT_TYPE_KEY, nodeTransportTypeKey) + .put(NetworkModule.HTTP_TYPE_KEY, nodeHttpTypeKey) + .put(NetworkModule.HTTP_ENABLED.getKey(), true).build(); } @Override protected Collection> nodePlugins() { - return 
pluginList(MockTcpTransportPlugin.class, Netty3Plugin.class, BogusPlugin.class); + return pluginList(MockTcpTransportPlugin.class, Netty3Plugin.class, Netty4Plugin.class, BogusPlugin.class); } @Override protected Collection> transportClientPlugins() { - return pluginList(MockTcpTransportPlugin.class, Netty3Plugin.class, BogusPlugin.class); + return pluginList(MockTcpTransportPlugin.class, Netty3Plugin.class, Netty4Plugin.class, BogusPlugin.class); } @Override protected Settings transportClientSettings() { return Settings.builder() - .put(super.transportClientSettings()) - .put("netty.assert.buglevel", false) - .put(NetworkModule.TRANSPORT_TYPE_KEY, randomFrom(Netty3Plugin.NETTY_TRANSPORT_NAME, - MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME)).build(); + .put(super.transportClientSettings()) + .put("netty.assert.buglevel", false) + .put(NetworkModule.TRANSPORT_TYPE_KEY, clientTypeKey) + .build(); } @Override @@ -66,15 +91,18 @@ public abstract class HttpSmokeTestCase extends ESIntegTestCase { return true; } - public static final class BogusPlugin extends Plugin { - // see Netty3Plugin.... 
this runs without the permission from the netty3 module so it will fail since reindex can't set the property + + // this runs without the permission from the netty modules so it will fail since reindex can't set the property // to make it still work we disable that check but need to register the setting first - private static final Setting ASSERT_NETTY_BUGLEVEL = Setting.boolSetting("netty.assert.buglevel", true, - Setting.Property.NodeScope); + private static final Setting ASSERT_NETTY_BUGLEVEL = + Setting.boolSetting("netty.assert.buglevel", true, Setting.Property.NodeScope); + @Override public List> getSettings() { return Collections.singletonList(ASSERT_NETTY_BUGLEVEL); } + } + } diff --git a/settings.gradle b/settings.gradle index e070e761433..d04e4b233a6 100644 --- a/settings.gradle +++ b/settings.gradle @@ -26,6 +26,7 @@ List projects = [ 'modules:lang-mustache', 'modules:lang-painless', 'modules:transport-netty3', + 'modules:transport-netty4', 'modules:reindex', 'modules:percolator', 'plugins:analysis-icu', diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java index 3414e9eda10..16647b04a47 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java @@ -242,7 +242,7 @@ public abstract class ESBackcompatTestCase extends ESIntegTestCase { protected Settings commonNodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder().put(requiredSettings()); - builder.put(NetworkModule.TRANSPORT_TYPE_KEY, "netty3"); // run same transport / disco as external + builder.put(NetworkModule.TRANSPORT_TYPE_KEY, randomBoolean() ? 
"netty3" : "netty4"); // run same transport / disco as external builder.put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "zen"); return builder.build(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java index b5a5778c1e5..cde4e5f6ac9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.Randomness; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.logging.ESLogger; @@ -51,7 +52,7 @@ final class ExternalNode implements Closeable { public static final Settings REQUIRED_SETTINGS = Settings.builder() .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "zen") - .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty3").build(); // we need network mode for this + .put(NetworkModule.TRANSPORT_TYPE_KEY, Randomness.get().nextBoolean() ? 
"netty3" : "netty4").build(); // we need network mode for this private final Path path; private final Random random; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 16606e9fa91..bc16f4f7477 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -421,4 +421,5 @@ public abstract class ESRestTestCase extends ESTestCase { } return runningTasks; } + } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java index 3fc924176f0..f291e0bbeec 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java @@ -28,25 +28,19 @@ import java.util.Map; public class FakeRestRequest extends RestRequest { private final Map headers; - - private final Map params; - private final BytesReference content; - private final Method method; - private final String path; public FakeRestRequest() { this(new HashMap<>(), new HashMap<>(), null, Method.GET, "/"); } private FakeRestRequest(Map headers, Map params, BytesReference content, Method method, String path) { + super(params, path); this.headers = headers; - this.params = params; this.content = content; this.method = method; - this.path = path; } @Override @@ -56,12 +50,7 @@ public class FakeRestRequest extends RestRequest { @Override public String uri() { - return path; - } - - @Override - public String rawPath() { - return path; + return rawPath(); } @Override @@ -84,31 +73,8 @@ public class FakeRestRequest extends RestRequest { return headers.entrySet(); } - @Override - public boolean hasParam(String key) { - return params.containsKey(key); - } - - @Override - public String 
param(String key) { - return params.get(key); - } - - @Override - public String param(String key, String defaultValue) { - String value = params.get(key); - if (value == null) { - return defaultValue; - } - return value; - } - - @Override - public Map params() { - return params; - } - public static class Builder { + private Map headers = new HashMap<>(); private Map params = new HashMap<>(); @@ -147,6 +113,7 @@ public class FakeRestRequest extends RestRequest { public FakeRestRequest build() { return new FakeRestRequest(headers, params, content, method, path); } + } } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java index eb4613f9e12..44a2bbd2166 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java @@ -43,6 +43,7 @@ import java.io.BufferedOutputStream; import java.io.Closeable; import java.io.IOException; import java.io.OutputStream; +import java.io.UncheckedIOException; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.Socket; @@ -146,8 +147,8 @@ public class MockTcpTransport extends TcpTransport output.write(minimalHeader); output.writeInt(msgSize); output.write(buffer); - BytesReference bytes = output.bytes(); - if (validateMessageHeader(bytes)) { + final BytesReference bytes = output.bytes(); + if (TcpTransport.validateMessageHeader(bytes)) { InetSocketAddress remoteAddress = (InetSocketAddress) socket.getRemoteSocketAddress(); messageReceived(bytes.slice(TcpHeader.MARKER_BYTES_SIZE + TcpHeader.MESSAGE_LENGTH_SIZE, msgSize), mockChannel, mockChannel.profile, remoteAddress, msgSize); @@ -349,5 +350,6 @@ public class MockTcpTransport extends TcpTransport protected Version getCurrentVersion() { return mockVersion; } + } diff --git 
a/test/framework/src/main/java/org/elasticsearch/transport/MockTransportClient.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTransportClient.java index a198ef77956..2b0f551dbb7 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/MockTransportClient.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTransportClient.java @@ -37,4 +37,5 @@ public class MockTransportClient extends TransportClient { public MockTransportClient(Settings settings, Collection> plugins) { super(settings, DEFAULT_SETTINGS, plugins); } + } From cd596772ee1ba48296371fdb4fb035a3c1cc7eda Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Sat, 23 Jul 2016 22:46:48 +0200 Subject: [PATCH 48/93] Persistent Node Names (#19456) With #19140 we started persisting the node ID across node restarts. Now that we have a "stable" anchor, we can use it to generate a stable default node name and make it easier to track nodes over a restarts. Sadly, this means we will not have those random fun Marvel characters but we feel this is the right tradeoff. On the implementation side, this requires a bit of juggling because we now need to read the node id from disk before we can log as the node node is part of each log message. The PR move the initialization of NodeEnvironment as high up in the starting sequence as possible, with only one logging message before it to indicate we are initializing. Things look now like this: ``` [2016-07-15 19:38:39,742][INFO ][node ] [_unset_] initializing ... [2016-07-15 19:38:39,826][INFO ][node ] [aAmiW40] node name set to [aAmiW40] by default. set the [node.name] settings to change it [2016-07-15 19:38:39,829][INFO ][env ] [aAmiW40] using [1] data paths, mounts [[ /(/dev/disk1)]], net usable_space [5.5gb], net total_space [232.6gb], spins? 
[unknown], types [hfs] [2016-07-15 19:38:39,830][INFO ][env ] [aAmiW40] heap size [1.9gb], compressed ordinary object pointers [true] [2016-07-15 19:38:39,837][INFO ][node ] [aAmiW40] version[5.0.0-alpha5-SNAPSHOT], pid[46048], build[473d3c0/2016-07-15T17:38:06.771Z], OS[Mac OS X/10.11.5/x86_64], JVM[Oracle Corporation/Java HotSpot(TM) 64-Bit Server VM/1.8.0_51/25.51-b03] [2016-07-15 19:38:40,980][INFO ][plugins ] [aAmiW40] modules [percolator, lang-mustache, lang-painless, reindex, aggs-matrix-stats, lang-expression, ingest-common, lang-groovy, transport-netty], plugins [] [2016-07-15 19:38:43,218][INFO ][node ] [aAmiW40] initialized ``` Needless to say, settings `node.name` explicitly still works as before. The commit also contains some clean ups to the relationship between Environment, Settings and Plugins. The previous code suggested the path related settings could be changed after the initial Environment was changed. This did not have any effect as the security manager already locked things down. 
--- .../elasticsearch/bootstrap/Bootstrap.java | 18 +- .../client/transport/TransportClient.java | 21 +- .../cluster/node/DiscoveryNode.java | 6 +- .../common/component/AbstractComponent.java | 3 +- .../elasticsearch/common/logging/Loggers.java | 6 +- .../common/util/concurrent/EsExecutors.java | 8 +- .../org/elasticsearch/env/Environment.java | 31 +- .../elasticsearch/env/NodeEnvironment.java | 35 +- .../elasticsearch/index/IndexSettings.java | 3 +- .../java/org/elasticsearch/node/Node.java | 155 +- .../internal/InternalSettingsPreparer.java | 48 +- .../org/elasticsearch/tribe/TribeService.java | 2 +- core/src/main/resources/config/names.txt | 2825 ----------------- .../env/NodeEnvironmentTests.java | 40 +- .../index/shard/ShardPathTests.java | 3 +- .../InternalSettingsPreparerTests.java | 4 +- .../migration/migrate_5_0/settings.asciidoc | 3 + .../setup/important-settings.asciidoc | 6 +- .../test/cat.allocation/10_basic.yaml | 2 +- .../org/elasticsearch/node/NodeTests.java | 58 + .../test/IndexSettingsModule.java | 8 +- 21 files changed, 274 insertions(+), 3011 deletions(-) delete mode 100644 core/src/main/resources/config/names.txt create mode 100644 test/framework/src/main/java/org/elasticsearch/node/NodeTests.java diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index bdec058b04a..7fa6245d1ec 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -142,7 +142,8 @@ final class Bootstrap { JvmInfo.jvmInfo(); } - private void setup(boolean addShutdownHook, Settings settings, Environment environment) throws Exception { + private void setup(boolean addShutdownHook, Environment environment) throws Exception { + Settings settings = environment.settings(); initializeNatives( environment.tmpFile(), BootstrapSettings.MEMORY_LOCK_SETTING.get(settings), @@ -171,7 +172,7 @@ final class Bootstrap { 
// install SM after natives, shutdown hooks, etc. Security.configure(environment, BootstrapSettings.SECURITY_FILTER_BAD_DEFAULTS_SETTING.get(settings)); - node = new Node(settings) { + node = new Node(environment) { @Override protected void validateNodeBeforeAcceptingRequests(Settings settings, BoundTransportAddress boundTransportAddress) { BootstrapCheck.check(settings, boundTransportAddress); @@ -179,7 +180,7 @@ final class Bootstrap { }; } - private static Environment initialSettings(boolean foreground, Path pidFile, Map esSettings) { + private static Environment initialEnvironment(boolean foreground, Path pidFile, Map esSettings) { Terminal terminal = foreground ? Terminal.DEFAULT : null; Settings.Builder builder = Settings.builder(); if (pidFile != null) { @@ -225,9 +226,8 @@ final class Bootstrap { INSTANCE = new Bootstrap(); - Environment environment = initialSettings(foreground, pidFile, esSettings); - Settings settings = environment.settings(); - LogConfigurator.configure(settings, true); + Environment environment = initialEnvironment(foreground, pidFile, esSettings); + LogConfigurator.configure(environment.settings(), true); checkForCustomConfFile(); if (environment.pidFile() != null) { @@ -250,9 +250,9 @@ final class Bootstrap { // initialized as we do not want to grant the runtime permission // setDefaultUncaughtExceptionHandler Thread.setDefaultUncaughtExceptionHandler( - new ElasticsearchUncaughtExceptionHandler(() -> Node.NODE_NAME_SETTING.get(settings))); + new ElasticsearchUncaughtExceptionHandler(() -> Node.NODE_NAME_SETTING.get(environment.settings()))); - INSTANCE.setup(true, settings, environment); + INSTANCE.setup(true, environment); INSTANCE.start(); @@ -266,7 +266,7 @@ final class Bootstrap { } ESLogger logger = Loggers.getLogger(Bootstrap.class); if (INSTANCE.node != null) { - logger = Loggers.getLogger(Bootstrap.class, INSTANCE.node.settings().get("node.name")); + logger = Loggers.getLogger(Bootstrap.class, 
Node.NODE_NAME_SETTING.get(INSTANCE.node.settings())); } // HACK, it sucks to do this, but we will run users out of disk space otherwise if (e instanceof CreationException) { diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 577c00178b6..a50a1e7bbea 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -19,15 +19,6 @@ package org.elasticsearch.client.transport; -import java.io.Closeable; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; @@ -63,6 +54,15 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.TransportService; +import java.io.Closeable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + /** * The transport client allows to create a client that is not part of the cluster, but simply connects to one * or more nodes directly by adding their respective addresses using {@link #addTransportAddress(org.elasticsearch.common.transport.TransportAddress)}. 
@@ -100,6 +100,9 @@ public abstract class TransportClient extends AbstractClient { private static ClientTemplate buildTemplate(Settings providedSettings, Settings defaultSettings, Collection> plugins) { + if (Node.NODE_NAME_SETTING.exists(providedSettings) == false) { + providedSettings = Settings.builder().put(providedSettings).put(Node.NODE_NAME_SETTING.getKey(), "_client_").build(); + } final PluginsService pluginsService = newPluginService(providedSettings, plugins); final Settings settings = Settings.builder().put(defaultSettings).put(pluginsService.updatedSettings()).build(); final List resourcesToClose = new ArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java index 5f2cb10212d..0c39c43bc9f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java @@ -39,7 +39,6 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.function.Predicate; -import java.util.function.Supplier; import static org.elasticsearch.common.transport.TransportAddressSerializers.addressToStream; @@ -191,7 +190,7 @@ public class DiscoveryNode implements Writeable, ToXContent { } /** Creates a DiscoveryNode representing the local node. 
*/ - public static DiscoveryNode createLocal(Settings settings, TransportAddress publishAddress, String nodeIdSupplier) { + public static DiscoveryNode createLocal(Settings settings, TransportAddress publishAddress, String nodeId) { Map attributes = new HashMap<>(Node.NODE_ATTRIBUTES.get(settings).getAsMap()); Set roles = new HashSet<>(); if (Node.NODE_INGEST_SETTING.get(settings)) { @@ -204,8 +203,7 @@ public class DiscoveryNode implements Writeable, ToXContent { roles.add(DiscoveryNode.Role.DATA); } - return new DiscoveryNode(Node.NODE_NAME_SETTING.get(settings), nodeIdSupplier, publishAddress, - attributes, roles, Version.CURRENT); + return new DiscoveryNode(Node.NODE_NAME_SETTING.get(settings), nodeId, publishAddress,attributes, roles, Version.CURRENT); } /** diff --git a/core/src/main/java/org/elasticsearch/common/component/AbstractComponent.java b/core/src/main/java/org/elasticsearch/common/component/AbstractComponent.java index d2fb9e3ecb9..fa49a80123d 100644 --- a/core/src/main/java/org/elasticsearch/common/component/AbstractComponent.java +++ b/core/src/main/java/org/elasticsearch/common/component/AbstractComponent.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.node.Node; /** * @@ -50,7 +51,7 @@ public abstract class AbstractComponent { * Returns the nodes name from the settings or the empty string if not set. 
*/ public final String nodeName() { - return settings.get("node.name", ""); + return Node.NODE_NAME_SETTING.get(settings); } /** diff --git a/core/src/main/java/org/elasticsearch/common/logging/Loggers.java b/core/src/main/java/org/elasticsearch/common/logging/Loggers.java index 613193baee5..4a938e38a2e 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/Loggers.java +++ b/core/src/main/java/org/elasticsearch/common/logging/Loggers.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.node.Node; import java.net.InetAddress; import java.net.UnknownHostException; @@ -101,9 +102,8 @@ public class Loggers { prefixesList.add(addr.getHostName()); } } - String name = settings.get("node.name"); - if (name != null) { - prefixesList.add(name); + if (Node.NODE_NAME_SETTING.exists(settings)) { + prefixesList.add(Node.NODE_NAME_SETTING.get(settings)); } if (prefixes != null && prefixes.length > 0) { prefixesList.addAll(asList(prefixes)); diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index 5ac94f8b386..2d682648ca4 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -22,6 +22,7 @@ package org.elasticsearch.common.util.concurrent; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.node.Node; import java.util.Arrays; import java.util.concurrent.BlockingQueue; @@ -83,11 +84,10 @@ public class EsExecutors { } public static String threadName(Settings settings, String namePrefix) { - String nodeName = 
settings.get("node.name"); - if (nodeName == null) { - return threadName("", namePrefix); + if (Node.NODE_NAME_SETTING.exists(settings)) { + return threadName(Node.NODE_NAME_SETTING.get(settings), namePrefix); } else { - return threadName(nodeName, namePrefix); + return threadName("", namePrefix); } } diff --git a/core/src/main/java/org/elasticsearch/env/Environment.java b/core/src/main/java/org/elasticsearch/env/Environment.java index 8c8b72bff63..4b544aa3882 100644 --- a/core/src/main/java/org/elasticsearch/env/Environment.java +++ b/core/src/main/java/org/elasticsearch/env/Environment.java @@ -36,6 +36,7 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Objects; import java.util.function.Function; import static org.elasticsearch.common.Strings.cleanPath; @@ -107,7 +108,6 @@ public class Environment { } public Environment(Settings settings) { - this.settings = settings; final Path homeFile; if (PATH_HOME_SETTING.exists(settings)) { homeFile = PathUtils.get(cleanPath(PATH_HOME_SETTING.get(settings))); @@ -171,6 +171,13 @@ public class Environment { binFile = homeFile.resolve("bin"); libFile = homeFile.resolve("lib"); modulesFile = homeFile.resolve("modules"); + + Settings.Builder finalSettings = Settings.builder().put(settings); + finalSettings.put(PATH_HOME_SETTING.getKey(), homeFile); + finalSettings.putArray(PATH_DATA_SETTING.getKey(), dataPaths); + finalSettings.put(PATH_LOGS_SETTING.getKey(), logsFile); + this.settings = finalSettings.build(); + } /** @@ -332,4 +339,26 @@ public class Environment { public static FileStore getFileStore(Path path) throws IOException { return ESFileStore.getMatchingFileStore(path, fileStores); } + + /** + * asserts that the two environments are equivalent for all things the environment cares about (i.e., all but the setting + * object which may contain different setting) + */ + public static void assertEquivalent(Environment actual, Environment 
expected) { + assertEquals(actual.dataWithClusterFiles(), expected.dataWithClusterFiles(), "dataWithClusterFiles"); + assertEquals(actual.repoFiles(), expected.repoFiles(), "repoFiles"); + assertEquals(actual.configFile(), expected.configFile(), "configFile"); + assertEquals(actual.scriptsFile(), expected.scriptsFile(), "scriptsFile"); + assertEquals(actual.pluginsFile(), expected.pluginsFile(), "pluginsFile"); + assertEquals(actual.binFile(), expected.binFile(), "binFile"); + assertEquals(actual.libFile(), expected.libFile(), "libFile"); + assertEquals(actual.modulesFile(), expected.modulesFile(), "modulesFile"); + assertEquals(actual.logsFile(), expected.logsFile(), "logsFile"); + assertEquals(actual.pidFile(), expected.pidFile(), "pidFile"); + assertEquals(actual.tmpFile(), expected.tmpFile(), "tmpFile"); + } + + private static void assertEquals(Object actual, Object expected, String name) { + assert Objects.deepEquals(actual, expected) : "actual " + name + " [" + actual + "] is different than [ " + expected + "]"; + } } diff --git a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 0e4f83b625d..367131d93cd 100644 --- a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -34,10 +34,10 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.UUIDs; -import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ 
-52,6 +52,7 @@ import org.elasticsearch.index.store.FsDirectoryService; import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.monitor.fs.FsProbe; import org.elasticsearch.monitor.jvm.JvmInfo; +import org.elasticsearch.node.Node; import java.io.Closeable; import java.io.IOException; @@ -79,7 +80,9 @@ import static java.util.Collections.unmodifiableSet; /** * A component that holds all data paths for a single node. */ -public final class NodeEnvironment extends AbstractComponent implements Closeable { +public final class NodeEnvironment implements Closeable { + + private final ESLogger logger; public static class NodePath { /* ${data.paths}/nodes/{node.id} */ @@ -139,8 +142,6 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl private final Path sharedDataPath; private final Lock[] locks; - private final boolean addLockIdToCustomPath; - private final int nodeLockId; private final AtomicBoolean closed = new AtomicBoolean(false); private final Map shardLocks = new HashMap<>(); @@ -177,12 +178,8 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl public static final String NODES_FOLDER = "nodes"; public static final String INDICES_FOLDER = "indices"; public static final String NODE_LOCK_FILENAME = "node.lock"; - public static final String UPGRADE_LOCK_FILENAME = "upgrade.lock"; public NodeEnvironment(Settings settings, Environment environment) throws IOException { - super(settings); - - this.addLockIdToCustomPath = ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.get(settings); if (!DiscoveryNode.nodeRequiresLocalStorage(settings)) { nodePaths = null; @@ -190,12 +187,16 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl locks = null; nodeLockId = -1; nodeMetaData = new NodeMetaData(generateNodeId(settings)); + logger = Loggers.getLogger(getClass(), Node.addNodeNameIfNeeded(settings, this.nodeMetaData.nodeId())); return; } final NodePath[] nodePaths = new 
NodePath[environment.dataWithClusterFiles().length]; final Lock[] locks = new Lock[nodePaths.length]; boolean success = false; + // trace logger to debug issues before the default node name is derived from the node id + ESLogger startupTraceLogger = Loggers.getLogger(getClass(), settings); + try { sharedDataPath = environment.sharedDataFile(); int nodeLockId = -1; @@ -207,7 +208,7 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl Path dataDir = environment.dataFiles()[dirIndex]; // TODO: Remove this in 6.0, we are no longer going to read from the cluster name directory if (readFromDataPathWithClusterName(dataDirWithClusterName)) { - DeprecationLogger deprecationLogger = new DeprecationLogger(logger); + DeprecationLogger deprecationLogger = new DeprecationLogger(startupTraceLogger); deprecationLogger.deprecated("ES has detected the [path.data] folder using the cluster name as a folder [{}], " + "Elasticsearch 6.0 will not allow the cluster name as a folder within the data path", dataDir); dataDir = dataDirWithClusterName; @@ -216,20 +217,20 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl Files.createDirectories(dir); try (Directory luceneDir = FSDirectory.open(dir, NativeFSLockFactory.INSTANCE)) { - logger.trace("obtaining node lock on {} ...", dir.toAbsolutePath()); + startupTraceLogger.trace("obtaining node lock on {} ...", dir.toAbsolutePath()); try { locks[dirIndex] = luceneDir.obtainLock(NODE_LOCK_FILENAME); nodePaths[dirIndex] = new NodePath(dir); nodeLockId = possibleLockId; } catch (LockObtainFailedException ex) { - logger.trace("failed to obtain node lock on {}", dir.toAbsolutePath()); + startupTraceLogger.trace("failed to obtain node lock on {}", dir.toAbsolutePath()); // release all the ones that were obtained up until now releaseAndNullLocks(locks); break; } } catch (IOException e) { - logger.trace("failed to obtain node lock on {}", e, dir.toAbsolutePath()); + 
startupTraceLogger.trace("failed to obtain node lock on {}", e, dir.toAbsolutePath()); lastException = new IOException("failed to obtain lock on " + dir.toAbsolutePath(), e); // release all the ones that were obtained up until now releaseAndNullLocks(locks); @@ -246,6 +247,8 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl throw new IllegalStateException("Failed to obtain node lock, is the following location writable?: " + Arrays.toString(environment.dataWithClusterFiles()), lastException); } + this.nodeMetaData = loadOrCreateNodeMetaData(settings, startupTraceLogger, nodePaths); + this.logger = Loggers.getLogger(getClass(), Node.addNodeNameIfNeeded(settings, this.nodeMetaData.nodeId())); this.nodeLockId = nodeLockId; this.locks = locks; @@ -258,8 +261,6 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl maybeLogPathDetails(); maybeLogHeapDetails(); - this.nodeMetaData = loadOrCreateNodeMetaData(settings, logger, nodePaths); - applySegmentInfosTrace(settings); assertCanWrite(); success = true; @@ -924,10 +925,6 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl } } - Settings getSettings() { // for testing - return settings; - } - /** * Resolve the custom path for a index's shard. 
* Uses the {@code IndexMetaData.SETTING_DATA_PATH} setting to determine @@ -940,7 +937,7 @@ public final class NodeEnvironment extends AbstractComponent implements Closeabl if (customDataDir != null) { // This assert is because this should be caught by MetaDataCreateIndexService assert sharedDataPath != null; - if (addLockIdToCustomPath) { + if (ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.get(indexSettings.getNodeSettings())) { return sharedDataPath.resolve(customDataDir).resolve(Integer.toString(this.nodeLockId)); } else { return sharedDataPath.resolve(customDataDir); diff --git a/core/src/main/java/org/elasticsearch/index/IndexSettings.java b/core/src/main/java/org/elasticsearch/index/IndexSettings.java index df348a5d6a1..bbbe3b80cd3 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/core/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.mapper.internal.AllFieldMapper; import org.elasticsearch.index.translog.Translog; +import org.elasticsearch.node.Node; import java.util.Locale; import java.util.concurrent.TimeUnit; @@ -227,7 +228,7 @@ public final class IndexSettings { this.index = indexMetaData.getIndex(); version = Version.indexCreated(settings); logger = Loggers.getLogger(getClass(), settings, index); - nodeName = settings.get("node.name", ""); + nodeName = Node.NODE_NAME_SETTING.get(settings); this.indexMetaData = indexMetaData; numberOfShards = settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, null); isShadowReplicaIndex = IndexMetaData.isIndexUsingShadowReplicas(settings); diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index 7966c8d8f49..a57492750ad 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -132,7 +132,6 @@ import 
java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import java.util.function.Function; import java.util.stream.Collectors; /** @@ -170,7 +169,16 @@ public class Node implements Closeable { } }, Setting.Property.NodeScope); - + /** + * Adds a default node name to the given setting, if it doesn't already exist + * @return the given setting if node name is already set, or a new copy with a default node name set. + */ + public static final Settings addNodeNameIfNeeded(Settings settings, final String nodeId) { + if (NODE_NAME_SETTING.exists(settings)) { + return settings; + } + return Settings.builder().put(settings).put(NODE_NAME_SETTING.getKey(), nodeId.substring(0, 7)).build(); + } private static final String CLIENT_TYPE = "node"; private final Lifecycle lifecycle = new Lifecycle(); @@ -188,53 +196,82 @@ public class Node implements Closeable { * @param preparedSettings Base settings to configure the node with */ public Node(Settings preparedSettings) { - this(InternalSettingsPreparer.prepareEnvironment(preparedSettings, null), Collections.>emptyList()); + this(InternalSettingsPreparer.prepareEnvironment(preparedSettings, null)); } - protected Node(Environment tmpEnv, Collection> classpathPlugins) { - Settings tmpSettings = Settings.builder().put(tmpEnv.settings()) - .put(Client.CLIENT_TYPE_SETTING_S.getKey(), CLIENT_TYPE).build(); + public Node(Environment environment) { + this(environment, Collections.emptyList()); + } + + protected Node(final Environment environment, Collection> classpathPlugins) { final List resourcesToClose = new ArrayList<>(); // register everything we need to release in the case of an error - - tmpSettings = TribeService.processSettings(tmpSettings); - ESLogger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(tmpSettings)); - final String displayVersion = Version.CURRENT + (Build.CURRENT.isSnapshot() ? 
"-SNAPSHOT" : ""); - final JvmInfo jvmInfo = JvmInfo.jvmInfo(); - logger.info( - "version[{}], pid[{}], build[{}/{}], OS[{}/{}/{}], JVM[{}/{}/{}/{}]", - displayVersion, - jvmInfo.pid(), - Build.CURRENT.shortHash(), - Build.CURRENT.date(), - Constants.OS_NAME, - Constants.OS_VERSION, - Constants.OS_ARCH, - Constants.JVM_VENDOR, - Constants.JVM_NAME, - Constants.JAVA_VERSION, - Constants.JVM_VERSION); - - logger.info("initializing ..."); - - if (logger.isDebugEnabled()) { - logger.debug("using config [{}], data [{}], logs [{}], plugins [{}]", - tmpEnv.configFile(), Arrays.toString(tmpEnv.dataFiles()), tmpEnv.logsFile(), tmpEnv.pluginsFile()); - } - // TODO: Remove this in Elasticsearch 6.0.0 - if (JsonXContent.unquotedFieldNamesSet) { - DeprecationLogger dLogger = new DeprecationLogger(logger); - dLogger.deprecated("[{}] has been set, but will be removed in Elasticsearch 6.0.0", - JsonXContent.JSON_ALLOW_UNQUOTED_FIELD_NAMES); - } - - this.pluginsService = new PluginsService(tmpSettings, tmpEnv.modulesFile(), tmpEnv.pluginsFile(), classpathPlugins); - this.settings = pluginsService.updatedSettings(); - // create the environment based on the finalized (processed) view of the settings - this.environment = new Environment(this.settings); - final List> executorBuilders = pluginsService.getExecutorBuilders(settings); - boolean success = false; + { + // use temp logger just to say we are starting. 
we can't use it later on because the node name might not be set + ESLogger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(environment.settings())); + logger.info("initializing ..."); + + } try { + Settings tmpSettings = Settings.builder().put(environment.settings()) + .put(Client.CLIENT_TYPE_SETTING_S.getKey(), CLIENT_TYPE).build(); + + tmpSettings = TribeService.processSettings(tmpSettings); + + // create the node environment as soon as possible, to recover the node id and enable logging + try { + nodeEnvironment = new NodeEnvironment(tmpSettings, environment); + resourcesToClose.add(nodeEnvironment); + } catch (IOException ex) { + throw new IllegalStateException("Failed to created node environment", ex); + } + + final boolean hadPredefinedNodeName = NODE_NAME_SETTING.exists(tmpSettings); + tmpSettings = addNodeNameIfNeeded(tmpSettings, nodeEnvironment.nodeId()); + ESLogger logger = Loggers.getLogger(Node.class, tmpSettings); + if (hadPredefinedNodeName == false) { + logger.info("node name [{}] derived from node ID; set [{}] to override", + NODE_NAME_SETTING.get(tmpSettings), NODE_NAME_SETTING.getKey()); + } + + final String displayVersion = Version.CURRENT + (Build.CURRENT.isSnapshot() ? 
"-SNAPSHOT" : ""); + final JvmInfo jvmInfo = JvmInfo.jvmInfo(); + logger.info( + "version[{}], pid[{}], build[{}/{}], OS[{}/{}/{}], JVM[{}/{}/{}/{}]", + displayVersion, + jvmInfo.pid(), + Build.CURRENT.shortHash(), + Build.CURRENT.date(), + Constants.OS_NAME, + Constants.OS_VERSION, + Constants.OS_ARCH, + Constants.JVM_VENDOR, + Constants.JVM_NAME, + Constants.JAVA_VERSION, + Constants.JVM_VERSION); + + + if (logger.isDebugEnabled()) { + logger.debug("using config [{}], data [{}], logs [{}], plugins [{}]", + environment.configFile(), Arrays.toString(environment.dataFiles()), environment.logsFile(), environment.pluginsFile()); + } + // TODO: Remove this in Elasticsearch 6.0.0 + if (JsonXContent.unquotedFieldNamesSet) { + DeprecationLogger dLogger = new DeprecationLogger(logger); + dLogger.deprecated("[{}] has been set, but will be removed in Elasticsearch 6.0.0", + JsonXContent.JSON_ALLOW_UNQUOTED_FIELD_NAMES); + } + + this.pluginsService = new PluginsService(tmpSettings, environment.modulesFile(), environment.pluginsFile(), classpathPlugins); + this.settings = pluginsService.updatedSettings(); + // create the environment based on the finalized (processed) view of the settings + // this is just to makes sure that people get the same settings, no matter where they ask them from + this.environment = new Environment(this.settings); + Environment.assertEquivalent(environment, this.environment); + + + final List> executorBuilders = pluginsService.getExecutorBuilders(settings); + final ThreadPool threadPool = new ThreadPool(settings, executorBuilders.toArray(new ExecutorBuilder[0])); resourcesToClose.add(() -> ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS)); // adds the context to the DeprecationLogger so that it does not need to be injected everywhere @@ -249,19 +286,13 @@ public class Node implements Closeable { additionalSettings.addAll(builder.getRegisteredSettings()); } final ResourceWatcherService resourceWatcherService = new ResourceWatcherService(settings, 
threadPool); - final ScriptModule scriptModule = ScriptModule.create(settings, environment, resourceWatcherService, - pluginsService.filterPlugins(ScriptPlugin.class)); - AnalysisModule analysisModule = new AnalysisModule(environment, pluginsService.filterPlugins(AnalysisPlugin.class)); + final ScriptModule scriptModule = ScriptModule.create(settings, this.environment, resourceWatcherService, + pluginsService.filterPlugins(ScriptPlugin.class)); + AnalysisModule analysisModule = new AnalysisModule(this.environment, pluginsService.filterPlugins(AnalysisPlugin.class)); additionalSettings.addAll(scriptModule.getSettings()); // this is as early as we can validate settings at this point. we already pass them to ScriptModule as well as ThreadPool // so we might be late here already final SettingsModule settingsModule = new SettingsModule(this.settings, additionalSettings, additionalSettingsFilter); - try { - nodeEnvironment = new NodeEnvironment(this.settings, this.environment); - resourcesToClose.add(nodeEnvironment); - } catch (IOException ex) { - throw new IllegalStateException("Failed to created node environment", ex); - } resourcesToClose.add(resourceWatcherService); final NetworkService networkService = new NetworkService(settings); final ClusterService clusterService = new ClusterService(settings, settingsModule.getClusterSettings(), threadPool); @@ -270,7 +301,7 @@ public class Node implements Closeable { final TribeService tribeService = new TribeService(settings, clusterService, nodeEnvironment.nodeId()); resourcesToClose.add(tribeService); NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); - final IngestService ingestService = new IngestService(settings, threadPool, environment, + final IngestService ingestService = new IngestService(settings, threadPool, this.environment, scriptModule.getScriptService(), pluginsService.filterPlugins(IngestPlugin.class)); ModulesBuilder modules = new ModulesBuilder(); @@ -287,10 +318,10 @@ public 
class Node implements Closeable { modules.add(new IndicesModule(namedWriteableRegistry, pluginsService.filterPlugins(MapperPlugin.class))); modules.add(new SearchModule(settings, namedWriteableRegistry, false, pluginsService.filterPlugins(SearchPlugin.class))); modules.add(new ActionModule(DiscoveryNode.isIngestNode(settings), false, settings, - clusterModule.getIndexNameExpressionResolver(), settingsModule.getClusterSettings(), - pluginsService.filterPlugins(ActionPlugin.class))); + clusterModule.getIndexNameExpressionResolver(), settingsModule.getClusterSettings(), + pluginsService.filterPlugins(ActionPlugin.class))); modules.add(new GatewayModule()); - modules.add(new RepositoriesModule(environment, pluginsService.filterPlugins(RepositoryPlugin.class))); + modules.add(new RepositoriesModule(this.environment, pluginsService.filterPlugins(RepositoryPlugin.class))); pluginsService.processModules(modules); CircuitBreakerService circuitBreakerService = createCircuitBreakerService(settingsModule.getSettings(), settingsModule.getClusterSettings()); @@ -306,7 +337,7 @@ public class Node implements Closeable { b.bind(PluginsService.class).toInstance(pluginsService); b.bind(Client.class).toInstance(client); b.bind(NodeClient.class).toInstance(client); - b.bind(Environment.class).toInstance(environment); + b.bind(Environment.class).toInstance(this.environment); b.bind(ThreadPool.class).toInstance(threadPool); b.bind(NodeEnvironment.class).toInstance(nodeEnvironment); b.bind(TribeService.class).toInstance(tribeService); @@ -316,14 +347,14 @@ public class Node implements Closeable { b.bind(ScriptService.class).toInstance(scriptModule.getScriptService()); b.bind(AnalysisRegistry.class).toInstance(analysisModule.getAnalysisRegistry()); b.bind(IngestService.class).toInstance(ingestService); - pluginComponents.stream().forEach(p -> b.bind((Class)p.getClass()).toInstance(p)); + pluginComponents.stream().forEach(p -> b.bind((Class) p.getClass()).toInstance(p)); } ); injector = 
modules.createInjector(); List pluginLifecycleComponents = pluginComponents.stream() .filter(p -> p instanceof LifecycleComponent) - .map(p -> (LifecycleComponent)p).collect(Collectors.toList()); + .map(p -> (LifecycleComponent) p).collect(Collectors.toList()); pluginLifecycleComponents.addAll(pluginsService.getGuiceServiceClasses().stream() .map(injector::getInstance).collect(Collectors.toList())); resourcesToClose.addAll(pluginLifecycleComponents); @@ -331,6 +362,8 @@ public class Node implements Closeable { client.intialize(injector.getInstance(new Key>() {})); + logger.info("initialized"); + success = true; } catch (IOException ex) { throw new ElasticsearchException("failed to bind service", ex); @@ -339,8 +372,6 @@ public class Node implements Closeable { IOUtils.closeWhileHandlingException(resourcesToClose); } } - - logger.info("initialized"); } /** diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java index 0c0482bfc63..dba7f303130 100644 --- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java @@ -21,18 +21,13 @@ package org.elasticsearch.node.internal; import org.elasticsearch.cli.Terminal; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.env.Environment; -import java.io.BufferedReader; import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; @@ -64,7 +59,7 @@ public class InternalSettingsPreparer { 
public static Settings prepareSettings(Settings input) { Settings.Builder output = Settings.builder(); initializeSettings(output, input, true, Collections.emptyMap()); - finalizeSettings(output, null, null); + finalizeSettings(output, null); return output.build(); } @@ -120,7 +115,7 @@ public class InternalSettingsPreparer { // re-initialize settings now that the config file has been loaded // TODO: only re-initialize if a config file was actually loaded initializeSettings(output, input, false, properties); - finalizeSettings(output, terminal, environment.configFile()); + finalizeSettings(output, terminal); environment = new Environment(output.build()); @@ -145,9 +140,8 @@ public class InternalSettingsPreparer { /** * Finish preparing settings by replacing forced settings, prompts, and any defaults that need to be added. * The provided terminal is used to prompt for settings needing to be replaced. - * The provided configDir is optional and will be used to lookup names.txt if the node name is not set, if provided. */ - private static void finalizeSettings(Settings.Builder output, Terminal terminal, Path configDir) { + private static void finalizeSettings(Settings.Builder output, Terminal terminal) { // allow to force set properties based on configuration of the settings provided List forcedSettings = new ArrayList<>(); for (String setting : output.internalMap().keySet()) { @@ -167,42 +161,6 @@ public class InternalSettingsPreparer { } replacePromptPlaceholders(output, terminal); - // all settings placeholders have been resolved. 
resolve the value for the name setting by checking for name, - // then looking for node.name, and finally generate one if needed - String name = output.get("node.name"); - if (name == null || name.isEmpty()) { - name = randomNodeName(configDir); - output.put("node.name", name); - } - } - - private static String randomNodeName(Path configDir) { - InputStream input; - if (configDir != null && Files.exists(configDir.resolve("names.txt"))) { - Path namesPath = configDir.resolve("names.txt"); - try { - input = Files.newInputStream(namesPath); - } catch (IOException e) { - throw new RuntimeException("Failed to load custom names.txt from " + namesPath, e); - } - } else { - input = InternalSettingsPreparer.class.getResourceAsStream("/config/names.txt"); - } - - try { - List names = new ArrayList<>(); - try (BufferedReader reader = new BufferedReader(new InputStreamReader(input, StandardCharsets.UTF_8))) { - String name = reader.readLine(); - while (name != null) { - names.add(name); - name = reader.readLine(); - } - } - int index = Randomness.get().nextInt(names.size()); - return names.get(index); - } catch (IOException e) { - throw new RuntimeException("Could not read node names list", e); - } } private static void replacePromptPlaceholders(Settings.Builder settings, Terminal terminal) { diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index 572f5e0ce13..eb44a897386 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -216,7 +216,7 @@ public class TribeService extends AbstractLifecycleComponent { } } Settings.Builder sb = Settings.builder().put(tribeSettings); - sb.put("node.name", globalSettings.get("node.name") + "/" + tribeName); + sb.put(Node.NODE_NAME_SETTING.getKey(), Node.NODE_NAME_SETTING.get(globalSettings) + "/" + tribeName); sb.put(Environment.PATH_HOME_SETTING.getKey(), 
Environment.PATH_HOME_SETTING.get(globalSettings)); // pass through ES home dir if (Environment.PATH_CONF_SETTING.exists(globalSettings)) { sb.put(Environment.PATH_CONF_SETTING.getKey(), Environment.PATH_CONF_SETTING.get(globalSettings)); diff --git a/core/src/main/resources/config/names.txt b/core/src/main/resources/config/names.txt deleted file mode 100644 index 9c42c2b10f8..00000000000 --- a/core/src/main/resources/config/names.txt +++ /dev/null @@ -1,2825 +0,0 @@ -3-D Man -A'lars -Aardwolf -Abdul Alhazred -Abe Brown -Abigail Brand -Abner Jenkins -Abner Little -Abominable Snowman -Abomination -Abominatrix -Abraham Cornelius -Abraxas -Absalom -Absorbing Man -Abyss -Access -Achebe -Achelous -Achilles -Acrobat -Adam II -Adam Warlock -Adam X -Adaptoid -Administrator -Adonis -Adrenazon -Adri Nital -Adrian Corbo -Adrian Toomes -Adrienne Frost -Adversary -Advisor -Aegis -Aelfyre Whitemane -Aero -Aftershock -Agamemnon -Agamotto -Agatha Harkness -Aged Genghis -Agent -Agent Axis -Agent Cheesecake -Agent X -Agent Zero -Aggamon -Aginar -Agon -Agony -Agron -Aguja -Ahab -Ahmet Abdol -Ahura -Air-Walker -Airborne -Aireo -Airstrike -Ajak -Ajax -Ajaxis -Akasha -Akhenaten -Al MacKenzie -Alaris -Albert -Albino -Albion -Alchemy -Alcmena -Aldebron -Aleksander Lukin -Aleksei Sytsevich -Aleta Ogord -Alex -Alex Hayden -Alex Power -Alex Wilder -Alexander Bont -Alexander Goodwin Pierce -Alexander Lexington -Alexander Summers -Alfie O'Meggan -Algrim the Strong -Alibar -Alicia Masters -Alistair Smythe -Alistaire Stuart -Aliyah Bishop -Alkhema -All-American -Allatou -Allison Blaire -Alpha Ray -Alpha the Ultimate Mutant -Alyosha Kravinoff -Alysande Stuart -Alyssa Moy -Amahl Farouk -Amalgam -Amanda Sefton -Amatsu-Mikaboshi -Amazon -Amber Hunt -Amelia Voght -Amergin -American Ace -American Dream -American Eagle -American Samurai -Americop -Ameridroid -Amiko Kobayashi -Amina Synge -Aminedi -Ammo -Amphibian -Amphibion -Amphibius -Amun -Anaconda -Anais -Analyzer -Anarchist -Ancient One -Andreas 
von Strucker -Andrew Chord -Andrew Gervais -Android Man -Andromeda -Anelle -Angar the Screamer -Angel -Angel Dust -Angel Face -Angel Salvadore -Angela Cairn -Angela Del Toro -Angelica Jones -Angelo Unuscione -Angler -Ani-Mator -Animus -Ankhi -Annalee -Anne-Marie Cortez -Annex -Annie Ghazikhanian -Annihilus -Anole -Anomalito -Anomaloco -Anomaly -Answer -Ant-Man -Anthropomorpho -Anti-Cap -Anti-Phoenix Force -Anti-Venom -Anti-Vision -Antimatter -Antiphon the Overseer -Antonio -Anubis -Anvil -Anything -Apache Kid -Apalla -Ape -Ape-Man -Ape-X -Apocalypse -Apollo -Apryll -Aquarian -Aquarius -Aqueduct -Arabian Knight -Arachne -Aragorn -Araki -Aralune -Araña -Arc -Arcade -Arcademan -Arcanna -Archangel -Archenemy -Archer -Archie Corrigan -Archimage -Architect -Arclight -Arcturus Rann -Ardina -Ardroman -Arena -Ares -Argo -Argus -Ariann -Arides -Ariel -Aries -Arishem the Judge -Arize -Arizona Annie -Arkady Rossovich -Arkon -Arkus -Arlette Truffaut -Arlok -Armadillo -Armageddon -Armand Martel -Armor -Armory -Arnim Zola -Arno Stark -Arranger -Arsenal -Arsenic -Artemis -Arthur Parks -Artie -Artie Maddicks -Arturo Falcones -Asbestos Lady -Asbestos Man -Ashcan -Asmodeus -Asp -Assassin -Asteroth -Astra -Astrid Bloom -Astron -Astronomer -Asylum -Atalanta -Atalon -Athena -Atlas -Atleza -Atom Bob -Atom-Smasher -Att-Lass -Attuma -Atum -Aunt May Parker -Auntie Freeze -Auric -Aurora -Authority -Autolycus -Avalanche -Avarrish -Awesome Android -Axum -Azazel -Baal -Balder -Balor -Balthakk -Bandit -Banshee -Bantam -Baphomet -Barbarus -Barnacle -Baron Blood -Baron Brimstone -Baron Macabre -Baron Mordo -Baron Samedi -Baron Strucker -Baron Von Blitzschlag -Baron Zemo -Baroness Blood -Barracuda -Bart Hamilton -Base -Basil Sandhurst -Basilisk -Bast -Bastion -Batragon -Batroc the Leaper -Battering Ram -Battleaxe -Battlestar -Battletide -Batwing -Beast -Beautiful Dreamer -Bedlam -Bedlam II -Beetle -Beetle II -Behemoth -Bela -Belasco -Belathauzer -Bella Donna -Ben Parker -Ben Reilly -Ben Urich 
-Benazir Kaur -Benedict Kine -Bengal -Benjamin Jacob Grimm -Bennet du Paris -Benny Beckley -Bentley Wittman -Bereet -Berzerker -Bes -Beta Ray Bill -Bethany Cabe -Betty Brant -Betty Brant Leeds -Betty Ross Banner -Bevatron -Beyonder -Bi-Beast -Bible John -Big Bertha -Big Man -Big Wheel -Bill Foster -Binary -Bird-Brain -Bird-Man -Bishop -Bison -Bizarnage -Black Bolt -Black Box -Black Cat -Black Crow -Black Death -Black Dragon -Black Fox -Black Goliath -Black Jack Tarr -Black King -Black Knight -Black Lama -Black Mamba -Black Marvel -Black Panther -Black Queen -Black Talon -Black Tarantula -Black Tom Cassidy -Black Widow -Blackbird -Blackheart -Blackheath -Blacklash -Blackout -Blackwing -Blackwulf -Blade -Blaquesmith -Blastaar -Blaze -Blazing Skull -Blind Faith -Blind Justice -Blindside -Blindspot -Bling -Blink -Blistik -Blitziana -Blitzkrieger -Blizzard -Blizzard II -Blob -Blockbuster -Bloke -Blonde Phantom -Blood Brothers -Blood Rose -Blood Spider -Bloodaxe -Bloodhawk -Bloodlust -Bloodlust II -Bloodscream -Bloodshed -Bloodsport -Bloodstorm -Bloodtide -Bloodwraith -Blowhard -Blue Bullet -Blue Diamond -Blue Marvel -Blue Shield -Blue Streak -Blur -Bob -Bob Diamond -Bobster -Bogeyman -Bombshell -Boneyard -Bonita Juarez -Boobytrap -Book -Boom Boom -Boom Boy -Boomer -Boomerang -Boomslang -Boost -Bora -Bounty -Bounty Hunter -Bova -Box -Box IV -Brain Cell -Brain Drain -Brain-Child -Brainchild -Bram Velsing -Brass -Bres -Brian Braddock -Brian Falsworth -Brigade -Briquette -Brother Nature -Brother Tode -Brother Voodoo -Brothers Grimm -Bruiser -Brunnhilda -Brutacus -Brute I -Brute II -Brute III -Brynocki -Bucky -Bucky III -Bug -Bulldozer -Bullet -Bullseye -Burner -Burstarr -Bushman -Bushmaster -Bushwacker -Butterball -Buzz -Buzzard -Byrrah -Caber -Cable -Cadaver -Cagliostro -Caiera -Caiman -Cain -Cain Marko -Caleb Alexander -Caliban -Callisto -Calvin Rankin -Calypso -Cameron Hodge -Canasta -Cancer -Candra -Cannonball -Cannonball I -Cap 'N Hawk -Caprice -Capricorn -Captain 
America -Captain Atlas -Captain Barracuda -Captain Britain -Captain Fate -Captain Germany -Captain Marvel -Captain Omen -Captain Savage -Captain UK -Captain Ultra -Captain Universe -Captain Wings -Captain Zero -Cardiac -Cardinal -Caregiver -Caretaker -Carl "Crusher" Creel -Carlos Lobo -Carmella Unuscione -Carmilla Black -Carnage -Carnivore -Carolyn Parmenter -Carolyn Trainer -Carrie Alexander -Carrion -Carter Ghazikhanian -Cassandra Nova -Cassie Lang -Cassiopea -Cat -Cat-Man -Catiana -Cayman -Cecelia Reyes -Cecilia Reyes -Celestial Madonna -Centennial -Centurion -Centurious -Centurius -Cerberus -Cerebra -Cerise -Cessily Kincaid -Cethlann -Ch'od -Chaka -Challenger -Chamber -Chameleon -Champion of the Universe -Chan Luichow -Chance -Changeling -Chaos -Charcoal -Charles Xavier -Charlie-27 -Charon -Chase Stein -Cheetah -Chemistro -Chen Lu -Chi Demon -Chief Examiner -Chimera -Chloe Tran -Choice -Chondu the Mystic -Christopher Summers -Chrome -Chronos -Chthon -Chtylok -Citizen V -Claire Voyant -Claudette St. 
Croix -Clea -Clearcut -Cletus Kasady -Clint Barton -Clive -Cloak -Cloud -Cloud 9 -Clown -Coach -Coachwhip -Cobalt Man -Cobra -Cody Mushumanski Gun Man -Cold War -Coldblood -Coldfire -Collective Man -Collector -Colleen Wing -Colonel -Colonel America -Colossus -Comet -Comet Man -Commander Kraken -Commando -Conan the Barbarian -Condor -Conquer Lord -Conquest -Conquistador -Conrad Josten -Constrictor -Contemplator -Contessa -Contrary -Controller -Copperhead -Copycat -Coral -Cordelia Frost -Cornelius van Lunt -Corona -Corruptor -Corsair -Cottonmouth -Count Abyss -Count Nefaria -Courier -Cowgirl -Crazy Eight -Crime Master -Crime-Buster -Crimebuster -Crimson -Crimson Cavalier -Crimson Commando -Crimson Cowl -Crimson Craig -Crimson Daffodil -Crimson Dynamo -Crimson Dynamo V -Crimson and the Raven -Crippler -Crooked Man -Crossbones -Crossfire -Crown -Crucible -Crusader -Crusher -Crystal -Curtis Connors -Cutthroat -Cybele -Cybelle -Cyber -Cyborg X -Cyclone -Cyclops -Cypher -D'Ken -D'Spayre -D-Man -DJ -Dagger -Daisy Johnson -Dakimh the Enchanter -Dakota North -Damballah -Damion Hellstrom -Damon Dran -Dan Ketch -Danger -Daniel Rand -Danielle Moonstar -Dansen Macabre -Danvers Carol -Daredevil -Dark Angel -Dark Beast -Dark Phoenix -Dark-Crawler -Darkdevil -Darkhawk -Darkoth -Darkstar -David Cannon -Daytripper -Dazzler -Deacon Frost -Dead Girl -Deadhead -Deadly Ernest -Deadpool -Death -Death Adder -Death's Head -Death's Head II -Death-Stalker -Deathbird -Deathlok -Deathstroke -Deathurge -Deathwatch -Deborah Ritter -Debra Whitman -Decay -Decay II -Defensor -Delilah -Delphi -Delphine Courtney -Dementia -Demiurge -Demogoblin -Demogorge the God-Eater -Demolition Man -Derrick Slegers Speed -Desmond Pitt -Destiny -Destroyer -Destroyer of Demons -Devastator -Devil Dinosaur -Devil Hunter Gabriel -Devil-Slayer -Devos the Devastator -Diablo -Diamanda Nero -Diamond Lil -Diamondback -Diamondhead -Digitek -Dionysus -Dirtnap -Discus -Dittomaster -Dmitri Bukharin -Dmitri Smerdyakov -Doc Samson 
-Doctor Anthony Droom -Doctor Arthur Nagan -Doctor Bong -Doctor Demonicus -Doctor Doom -Doctor Dorcas -Doctor Droom -Doctor Druid -Doctor Faustus -Doctor Glitternight -Doctor Leery -Doctor Minerva -Doctor Octopus -Doctor Spectrum -Doctor Strange -Doctor Sun -Domina -Dominic Fortune -Dominic Petros -Domino -Dominus -Domo -Don Fortunato -Donald "Donny" Gill -Donald Pierce -Donald Ritter -Doop -Doorman -Doppelganger -Doppleganger -Dorma -Dormammu -Double Helix -Doug Ramsey -Doug and Jerry -Dougboy -Doughboy -Douglas Birely -Douglas Ramsey -Douglock -Dr. John Grey -Dr. Lemuel Dorcas -Dr. Marla Jameson -Dr. Otto Octavius -Dracula -Dragon Lord -Dragon Man -Dragon of the Moon -Dragoness -Dragonfly -Dragonwing -Drax the Destroyer -Dreadknight -Dreadnought -Dream Weaver -Dreaming Celestial -Dreamqueen -Dredmund Druid -Dromedan -Druid -Druig -Dum-Dum Dugan -Dusk -Dust -Dweller-in-Darkness -Dyna-Mite -Earth Lord -Earthquake -Ebenezer Laughton -Ebon Seeker -Echo -Ecstasy -Ectokid -Eddie Brock -Edward "Ned" Buckman -Edwin Jarvis -Eel -Egghead -Ego the Living Planet -El Aguila -El Muerto -Elaine Grey -Elathan -Electric Eve -Electro -ElectroCute -Electron -Eleggua -Elektra -Elektra Natchios -Elektro -Elf With A Gun -Elfqueen -Elias Bogan -Eliminator -Elixir -Elizabeth "Betsy" Braddock -Elizabeth Twoyoungmen -Ellie Phimster -Elsie-Dee -Elven -Elysius -Emil Blonsky -Emma Frost -Empath -Empathoid -Emplate -En Sabah Nur -Enchantress -Energizer -Enforcer -Enigma -Ent -Entropic Man -Eon -Epoch -Equilibrius -Equinox -Ereshkigal -Erg -Eric Slaughter -Eric Williams -Eric the Red -Erik Josten -Erik Killmonger -Erik Magnus Lehnsherr -Ernst -Eros -Eshu -Eson the Searcher -Eternal Brain -Eternity -Ethan Edwards -Eugene Judd -Ev Teel Urizen -Evangeline Whedon -Ever -Everett Thomas -Everyman -Evilhawk -Executioner -Exodus -Exploding Man -Exterminator -Ezekiel -Ezekiel Sims -Ezekiel Stane -Fabian Cortez -Fafnir -Fagin -Falcon -Fallen One -Famine -Fan Boy -Fandral -Fang -Fantasia -Fantastic Four 
-Fantomex -Farallah -Fasaud -Fashima -Fatale -Fateball -Father Time -Fault Zone -Fearmaster -Feedback -Felicia Hardy -Feline -Fenris -Fenris Wolf -Fer-de-Lance -Feral -Feron -Fever Pitch -Fight-Man -Fin -Fin Fang Foom -Firearm -Firebird -Firebolt -Firebrand -Firefrost -Firelord -Firepower -Firestar -Fixer -Fixx -Flag-Smasher -Flambe -Flash Thompson -Flatman -Flex -Flint Marko -Flubber -Fly -Flygirl -Flying Tiger -Foggy Nelson -Fontanelle -Foolkiller -Forbush Man -Force -Forearm -Foreigner -Forge -Forgotten One -Foxfire -Frank Castle -Frank Drake -Frank Payne -Frank Simpson -Frankenstein's Monster -Frankie Raye -Frankie and Victoria -Franklin Hall -Franklin Richards -Franklin Storm -Freak -Freak of Science -Freakmaster -Freakshow -Fred Myers -Frederick Slade -Free Spirit -Freedom Ring -Frenzy -Frey -Frigga -Frog-Man -Fury -Fusion -Futurist -G-Force -Gabe Jones -Gabriel Summers -Gabriel the Air-Walker -Gaea -Gaia -Gailyn Bailey -Galactus -Galaxy Master -Gambit -Gammenon the Gatherer -Gamora -Ganymede -Gardener -Gargantua -Gargantus -Gargouille -Gargoyle -Garokk the Petrified Man -Garrison Kane -Gatecrasher -Gateway -Gauntlet -Gavel -Gaza -Gazelle -Gazer -Geb -Gee -Geiger -Geirrodur -Gemini -General Orwell Taylor -Genis-Vell -George Stacy -George Tarleton -George Washington Bridge -Georgianna Castleberry -Gertrude Yorkes -Ghaur -Ghost -Ghost Dancer -Ghost Girl -Ghost Maker -Ghost Rider -Ghost Rider 2099 -Ghoul -Giant-Man -Gibbon -Gibborim -Gideon -Gideon Mace -Giganto -Gigantus -Gin Genie -Gladiator -Gladiatrix -Glamor -Glenn Talbot -Glitch -Glob -Glob Herman -Gloom -Glorian -Goblin Queen -Goblyn -Godfrey Calthrop -Gog -Goldbug -Golden Archer -Golden Girl -Golden Oldie -Goldeneye -Golem -Goliath -Gomi -Googam -Gorgeous George -Gorgilla -Gorgon -Gorilla Girl -Gorilla-Man -Gorr -Gosamyr -Grand Director -Grandmaster -Grappler -Grasshopper -Grasshopper II -Graviton -Gravity -Graydon Creed -Great Gambonnos -Great Video -Green Goblin -Green Goblin IV -Greer Grant -Greer 
Grant Nelson -Gregor Shapanka -Gregory Gideon -Gremlin -Grenade -Grey Gargoyle -Grey King -Griffin -Grim Hunter -Grim Reaper -Grizzly -Grog the God-Crusher -Gronk -Grotesk -Groundhog -Growing Man -Guardsman -Guido Carosella -Gunthar of Rigel -Gwen Stacy -Gypsy Moth -H.E.R.B.I.E. -Hack -Hag -Hairbag -Halflife -Halloween Jack -Hamilton Slade -Hammer Harrison -Hammer and Anvil -Hammerhead -Hangman -Hank McCoy -Hank Pym -Hanna Levy -Hannah Levy -Hannibal King -Harald Jaekelsson -Hardcase -Hardcore -Hardnose -Hardshell -Hardwire -Hargen the Measurer -Harmonica -Harness -Harold "Happy" Hogan -Harold H. Harold -Harpoon -Harpy -Harrier -Harry Leland -Harry Osborn -Hate-Monger -Haven -Havok -Hawkeye -Hawkeye II -Hawkshaw -Haywire -Hazard -Hazmat -Headknocker -Headlok -Heart Attack -Heather Cameron -Hebe -Hecate -Hector -Heimdall -Heinrich Zemo -Hela -Helio -Hellcat -Helleyes -Hellfire -Hellion -Hellrazor -Helmut Zemo -Henry "Hank" McCoy -Henry Peter Gyrich -Hensley Fargus -Hephaestus -Hepzibah -Her -Hera -Herbert Edgar Wyndham -Hercules -Herman Schultz -Hermes -Hermod -Hero -Hero for Hire -Herr Kleiser -Hideko Takata -High Evolutionary -High-Tech -Hijacker -Hildegarde -Him -Hindsight Lad -Hippolyta -Hisako Ichiki -Hit-Maker -Hitman -Hobgoblin -Hobgoblin II -Hoder -Hogun -Holly -Honcho -Honey Lemon -Hood -Hornet -Horus -Howard the Duck -Hrimhari -Hub -Hugh Jones -Hulk -Hulk 2099 -Hulkling -Human Cannonball -Human Fly -Human Robot -Human Top -Human Top II -Human Torch -Human Torch II -Humbug -Humus Sapien -Huntara -Hurricane -Husk -Hussar -Hybrid -Hybrid II -Hyde -Hydro -Hydro-Man -Hydron -Hyperion -Hyperkind -Hyperstorm -Hypnotia -Hyppokri -ISAAC -Icarus -Iceman -Icemaster -Idunn -Iguana -Ikaris -Ikonn -Ikthalon -Illusion -Illyana Rasputin -Immortus -Impala -Imperial Hydra -Impossible Man -Impulse -In-Betweener -Indech -Indra -Inertia -Infamnia -Infant Terrible -Infectia -Inferno -Infinity -Interloper -Invisible Girl -Invisible Woman -Inza -Ion -Iridia -Iron Cross -Iron Fist 
-Iron Lad -Iron Maiden -Iron Man -Iron Man 2020 -Iron Monger -Ironclad -Isaac Christians -Isaiah Bradley -Isbisa -Isis -Ivan Kragoff -J. Jonah Jameson -J2 -Jack Flag -Jack Frost -Jack Kirby -Jack O'Lantern -Jack Power -Jack of Hearts -Jack-in-the-Box -Jackal -Jackdaw -Jackhammer -Jackpot -Jackson Arvad -Jacob "Jake" Fury -Jacqueline Falsworth -Jacques DuQuesne -Jade Dragon -Jaeger -Jaguar -Jamal Afari -James "Jimmy" Marks -James Dr. Power -James Howlett -James Jaspers -James Madrox -James Proudstar -James Rhodes -James Sanders -Jamie Braddock -Jane Foster -Jane Kincaid -Janet van Dyne -Jann -Janus -Jared Corbo -Jarella -Jaren -Jason -Jawynn Dueck the Iron Christian of Faith -Jazz -Jean DeWolff -Jean Grey -Jean Grey-Summers -Jean-Paul Beaubier -Jeanne-Marie Beaubier -Jebediah Guthrie -Jeffrey Mace -Jekyll -Jennifer Kale -Jennifer Walters -Jens Meilleur Slap Shot -Jericho Drumm -Jerome Beechman -Jerry Jaxon -Jessica Drew -Jessica Jones -Jester -Jigsaw -Jim Hammond -Jimaine Szardos -Jimmy Woo -Jocasta -Joe Cartelli -Joe Fixit -Joey Bailey -Johann Schmidt -John Doe -John Falsworth -John Jameson -John Proudstar -John Ryker -John Sublime -John Walker -Johnny Blaze -Johnny Ohm -Johnny Storm -Jolt -Jon Spectre -Jonas Harrow -Jonathan "John" Garrett -Jonathan Richards -Jonothon Starsmore -Jordan Seberius -Joseph -Joshua Guthrie -Joystick -Jubilee -Judas Traveller -Jude the Entropic Man -Juggernaut -Julie Power -Jumbo Carnation -Junkpile -Junta -Justice -Justin Hammer -Justine Hammer -Ka-Zar -Kaine -Kala -Kaluu -Kamal -Kamo Tharnn -Kamuu -Kang the Conqueror -Kangaroo -Karen Page -Karima Shapandar -Karkas -Karl Lykos -Karl Malus -Karl Mordo -Karla Sofen -Karma -Karnak -Karnilla -Karolina Dean -Karthon the Quester -Kasper Cole -Kate Bishop -Kate Neville -Katherine "Kitty" Pryde -Katherine Reynolds -Katie Power -Katrina Luisa van Horne -Katu -Keen Marlow -Kehl of Tauran -Keith Kilham -Kem Horkus -Kenneth Crichton -Key -Khaos -Khonshu -Khoryphos -Kiber the Cruel -Kick-Ass -Kid 
Colt -Kid Nova -Kiden Nixon -Kierrok -Killer Shrike -Killpower -Killraven -Kilmer -Kimura -King Bedlam -Kingo Sunen -Kingpin -Kirigi -Kirtsyn Perrin Short Stop -Kismet -Kismet Deadly -Kiss -Kiwi Black -Kkallakku -Kl'rt -Klaatu -Klaw -Kleinstocks -Knickknack -Kofi Whitemane -Kogar -Kohl Harder Boulder Man -Korath the Pursuer -Korg -Kormok -Korrek -Korvac -Korvus -Kosmos -Kraken -Krakkan -Krang -Kraven the Hunter -Krista Marwan -Kristoff Vernard -Kristoff von Doom -Kro -Krystalin -Kubik -Kukulcan -Kurse -Kurt Wagner -Kwannon -Kyle Gibney -Kylun -Kymaera -La Lunatica -La Nuit -Lacuna -Lady Deathstrike -Lady Jacqueline Falsworth Crichton -Lady Killer -Lady Lark -Lady Lotus -Lady Mandarin -Lady Mastermind -Lady Octopus -Lament -Lancer -Landslide -Larry Bodine -Lasher -Laura Dean -Layla Miller -Lazarus -Leader -Leap-Frog -Leash -Lee Forrester -Leech -Left Hand -Left-Winger -Legacy -Legion -Leila Davis -Leir -Lemuel Dorcas -Leo -Leonard Samson -Leonus -Letha -Levan -Lianda -Libra -Lifeforce -Lifeguard -Lifter -Lightbright -Lighting Rod -Lightmaster -Lightspeed -Lila Cheney -Lilandra Neramani -Lilith, the Daughter of Dracula -Lin Sun -Link -Lionheart -Live Wire -Living Brain -Living Colossus -Living Diamond -Living Eraser -Living Hulk -Living Laser -Living Lightning -Living Monolith -Living Mummy -Living Pharaoh -Living Planet -Living Totem -Living Tribunal -Liz Allan -Lizard -Llan the Sorcerer -Lloigoroth -Llyra -Llyron -Loa -Lockdown -Lockheed -Lockjaw -Locksmith -Locus -Locust -Lodestone -Logan -Loki -Longneck -Longshot -Lonnie Thompson Lincoln -Looter -Lord Chaos -Lord Dark Wind -Lord Pumpkin -Lorelei -Lorelei II -Lorelei Travis -Lorna Dane -Lorvex -Loss -Louise Mason -Lucas Brand -Luchino Nefaria -Lucifer -Ludi -Luke Cage -Luna -Lunatica -Lunatik -Lupa -Lupo -Lurking Unknown -Lyja -Lynx -M -M-Twins -MN-E (Ultraverse) -MODAM -MODOK -Mac Gargan -Mach-IV -Machine Man -Machine Teen -Machinesmith -Mad Dog Rassitano -Mad Jack -Mad Jim Jaspers -Mad Thinker -Mad Thinker’s 
Awesome Android -Mad-Dog -Madam Slay -Madame Hydra -Madame MacEvil -Madame Masque -Madame Menace -Madame Web -Madcap -Madeline Joyce -Madelyne Pryor -Madison Jeffries -Maelstrom -Maestro -Magdalena -Magdalene -Maggott -Magician -Magik -Magilla -Magma -Magneto -Magnum -Magnus -Magus -Maha Yogi -Mahkizmo -Major Mapleleaf -Makkari -Malekith the Accursed -Malice -Mammomax -Man Mountain Marko -Man-Ape -Man-Beast -Man-Brute -Man-Bull -Man-Eater -Man-Elephant -Man-Killer -Man-Spider -Man-Thing -Man-Wolf -Manbot -Mandarin -Mandrill -Mandroid -Mangle -Mangog -Manikin -Manslaughter -Manta -Mantis -Mantra -Mar-Vell -Marc Spector -Marduk Kurios -Margali Szardos -Margaret Power -Margo Damian -Maria Hill -Mariko Yashida -Marius St. Croix -Mark Gervaisnight Shade -Mark Raxton -Mark Scarlotti -Mark Todd -Marlene Alraune -Marrina -Marrina Smallwood -Marrow -Marsha Rosenberg -Martha Johansson -Martin Gold -Martin Preston -Martinex -Marvel Boy -Marvel Girl -Marvel Man -Marvin Flumm -Mary "Skeeter" MacPherran -Mary Jane Parker -Mary Jane Watson -Mary Walker -Mary Zero -Masked Marauder -Masked Marvel -Masked Rose -Masque -Mass Master -Master Khan -Master Man -Master Menace -Master Mold -Master Order -Master Pandemonium -Master of Vengeance -Mastermind -Mastermind of the UK -Matador -Match -Matsu'o Tsurayaba -Matt Murdock -Mauler -Maur-Konn -Mauvais -Maverick -Max -Maxam -Maximus -Maxwell Dillon -May "Mayday" Parker -May Parker -Mayhem -Maynard Tiboldt -Meanstreak -Meathook -Mechamage -Medusa -Meggan -Meggan Braddock -Mekano -Meld -Melee -Melissa Gold -Melody Guthrie -Meltdown -Melter -Mentallo -Mentor -Mentus -Mephisto -Mercurio -Mercury -Mercy -Merlin -Mesmero -Metal Master -Metalhead -Meteor Man -Meteorite -Meteorite II -Michael Nowman -Michael Twoyoungmen -Micro -Microchip -Micromax -Midas -Midgard Serpent -Midnight -Midnight Man -Midnight Sun -Miek -Miguel Espinosa -Miguel O'Hara -Miguel Santos -Mikado -Mikey -Mikhail Rasputin -Mikula Golubev -Milan -Miles Warren -Milos Masaryk 
-Mimic -Mimir -Mindmeld -Mindworm -Miracle Man -Mirage -Mirage II -Misfit -Miss America -Missing Link -Mist Mistress -Mister Buda -Mister Doll -Mister Fear -Mister Hyde -Mister Jip -Mister Machine -Mister One -Mister Sensitive -Mister Sinister -Mister Two -Mister X -Misty Knight -Mockingbird -Modred the Mystic -Mogul of the Mystic Mountain -Moira Brandon -Moira MacTaggert -Mojo -Mole Man -Molecule Man -Molly Hayes -Molten Man -Mondo -Monet St. Croix -Mongoose -Monica Rappaccini -Monsoon -Monstra -Monstro the Mighty -Moon Knight -Moon-Boy -Moondark -Moondragon -Moonhunter -Moonstone -Mop Man -Morbius -Mordred -Morg -Morgan Le Fay -Morlun -Morning Star -Morph -Morpheus -Morris Bench -Mortimer Toynbee -Moses Magnum -Mosha -Mother Earth -Mother Nature -Mother Night -Mother Superior -Motormouth -Mountjoy -Mr. Fish -Mr. Justice -Mr. M -Mr. Wu -Ms. MODOK -Ms. Marvel -Ms. Steed -Multiple Man -Murmur -Murmur II -Mutant Master -Mutant X -Myron MacLain -Mys-Tech -Mysterio -Mystique -N'Gabthoth -N'Garai -N'astirh -NFL Superpro -Naga -Nameless One -Namor McKenzie -Namor the Sub-Mariner -Namora -Namorita -Nanny -Nate Grey -Nathaniel Essex -Nathaniel Richards -Native -Nebula -Nebulo -Nebulon -Nebulos -Necrodamus -Necromantra -Ned Horrocks -Ned Leeds -Needle -Nefarius -Negasonic Teenage Warhead -Nekra -Nekra Sinclar -Nemesis -Neophyte -Neptune -Network -Neuronne -Neurotap -New Goblin -Nezarr the Calculator -Nicholas Maunder -Nicholas Scratch -Nick Fury -Nico Minoru -Nicole St. 
Croix -Night Nurse -Night Rider -Night Thrasher -Nightcrawler -Nighthawk -Nightmare -Nightshade -Nightside -Nightwatch -Nightwind -Nikki -Niles Van Roekel -Nimrod -Ningal -Nitro -Nobilus -Nocturne -Noh-Varr -Nomad -Norman Osborn -Norns -Norrin Radd -Northstar -Nosferata -Nova -Nova-Prime -Novs -Nox -Nth Man -Nth Man: the Ultimate Ninja -Nuke - Frank Simpson -Nuke - Squadron Supreme Member -Nuklo -Numinus -Nut -Obadiah Stane -Obituary -Obliterator -Oblivion -Occulus -Ocean -Ocelot -Oddball -Odin -Ogre -Ogress -Omega -Omega Red -Omega the Unknown -Omen -Omerta -One Above All -Oneg the Prober -Onslaught -Onyxx -Ooze -Optoman -Oracle -Orator -Orb -Orbit -Orchid -Ord -Order -Orikal -Orka -Ororo Munroe -Orphan -Orphan-Maker -Osiris -Outlaw -Outrage -Overkill -Overmind -Overrider -Owl -Ox -Ozone -Ozymandias -Paibo -Paige Guthrie -Paladin -Paradigm -Paragon -Paralyzer -Paris -Pasco -Paste-Pot Pete -Patch -Pathway -Patriot -Patriot II -Patsy Hellstrom -Patsy Walker -Paul Bailey -Paul Norbert Ebersol -Paul Patterson -Payback -Peace Monger -Peepers -Peggy Carter -Penance -Penance II -Peregrine -Perfection -Perseus -Persuader -Persuasion -Perun -Pete Wisdom -Peter Criss -Peter Noble -Peter Parker -Peter Petruski -Phade -Phage -Phalanx -Phantazia -Phantom Blonde -Phantom Eagle -Phantom Rider -Phastos -Phat -Phil Urich -Philip Fetter -Phineas T. 
Horton -Phoenix -Photon -Phyla-Vell -Pietro Maximoff -Piledriver -Piotr Rasputin -Pip the Troll -Pipeline -Piper -Piranha -Pisces -Pistol -Pixie -Pixx -Plague -Plantman -Plasma -Plazm -Plug -Plunderer -Pluto -Poison -Polaris -Poltergeist -Porcupine -Portal -Possessor -Postman -Postmortem -Poundcakes -Powderkeg -Power Broker -Power Man -Power Princess -Power Skrull -Powerhouse -Powerpax -Presence -Pressure -Prester John -Pretty Persuasions -Preview -Primal -Prime -Prime Mover -Primevil -Primus -Princess Python -Proctor -Prodigy -Professor Power -Professor X -Projector -Prometheus -Protector -Proteus -Prototype -Prowler -Psi-Lord -Psyche -Psycho-Man -Psyklop -Psylocke -Puck -Puff Adder -Puishannt -Pulse -Puma -Punchout -Punisher -Punisher 2099 -Puppet Master -Purge -Purple Girl -Purple Man -Pyre -Pyro -Quagmire -Quantum -Quasar -Quasar II -Quasimodo -Quentin Beck -Quentin Quire -Quicksand -Quicksilver -Quincy Harker -Raa of the Caves -Rachel Grey -Rachel Summers -Rachel van Helsing -Radian -Radioactive Man -Radion the Atomic Man -Radius -Rafferty -Rage -Raggadorr -Rahne Sinclair -Rainbow -Rama-Tut -Raman -Ramrod -Ramshot -Rancor -Randall Shire -Random -Ranger -Ransak the Reject -Rattler -Ravage 2099 -Raving Beauty -Rawhide Kid -Rax -Raymond Sikorsky -Raza -Razor Fist -Razorback -Reaper -Rebel -Recorder -Red Claw -Red Ghost -Red Guardian -Red Lotus -Red Nine -Red Raven -Red Ronin -Red Shift -Red Skull -Red Skull II -Red Wolf -Redeemer -Redneck -Redwing -Reeva Payge -Reignfire -Reject -Remnant -Remy LeBeau -Reptyl -Revanche -Rex Mundi -Rhiannon -Rhino -Ricadonna -Richard Fisk -Richard Parker -Richard Rider -Rick Jones -Ricochet -Rictor -Rigellian Recorder -Right-Winger -Ringer -Ringleader -Ringmaster -Ringo Kid -Rintrah -Riot -Riot Grrl -Ripfire -Ritchie Gilmore -Rl'nnd -Robbie Robertson -Robert "Bobby" Drake -Robert Bruce Banner -Robert Hunter -Robert Kelly -Robert da Costa -Rock -Rock Python -Rocket Raccoon -Rocket Racer -Rodstvow -Rogue -Rom the Spaceknight -Roma 
-Romany Wisdom -Ronan the Accuser -Rose -Roughhouse -Roulette -Royal Roy -Ruby Thursday -Ruckus -Rumiko Fujikawa -Rune -Runner -Rush -Rusty Collins -Ruth Bat-Seraph -Ryder -S'byll -S'ym -Sabra -Sabreclaw -Sabretooth -Sack -Sage -Sagittarius -Saint Anna -Saint Elmo -Sally Blevins -Sally Floyd -Salvo -Sam Sawyer -Sam Wilson -Samuel "Starr" Saxon -Samuel Guthrie -Samuel Silke -Samuel Smithers -Sandman -Sangre -Sara Grey -Sasquatch -Satana -Satannish -Saturnyne -Sauron -Savage Steel -Sayge -Scaleface -Scalphunter -Scanner -Scarecrow -Scarecrow II -Scarlet Beetle -Scarlet Centurion -Scarlet Scarab -Scarlet Spider -Scarlet Spiders -Scarlet Witch -Schemer -Scimitar -Scintilla -Scorcher -Scorpia -Scorpio -Scorpion -Scott Summers -Scott Washington -Scourge of the Underworld -Scrambler -Scream -Screaming Mimi -Screech -Scrier -Sea Urchin -Seamus Mellencamp -Sean Cassidy -Sean Garrison -Sebastian Shaw -Seeker -Sekhmet -Selene -Senator Robert Kelly -Senor Muerte -Sentry -Sepulchre -Sergeant Fury -Sergei Kravinoff -Serpentina -Sersi -Set -Seth -Shadow King -Shadow Slasher -Shadow-Hunter -Shadowcat -Shadowmage -Shadrac -Shalla-Bal -Shaman -Shamrock -Shang-Chi -Shanga -Shanna the She-Devil -Shaper of Worlds -Shard -Sharon Carter -Sharon Friedlander -Sharon Ventura -Shathra -Shatter -Shatterfist -Shatterstar -She-Hulk -She-Thing -She-Venom -Shellshock -Shen Kuei -Shi'ar Gladiator -Shinchuko Lotus -Shingen Harada -Shinobi Shaw -Shirow Ishihara -Shiva -Shiver Man -Shocker -Shockwave -Shola Inkosi -Shooting Star -Shotgun -Shriek -Shriker -Shroud -Shrunken Bones -Shuma-Gorath -Sidewinder -Siege -Siena Blaze -Sif -Sigmar -Sigyn -Sikorsky -Silhouette -Silly Seal -Silver -Silver Dagger -Silver Fox -Silver Sable -Silver Samurai -Silver Scorpion -Silver Squire -Silver Surfer -Silverclaw -Silvermane -Simon Williams -Sin -Sin-Eater -Sinister -Sir Steel -Siryn -Sise-Neg -Skein -Skids -Skin -Skinhead -Skull the Slayer -Skullcrusher -Skullfire -Skunge the Laxidazian Troll -Skyhawk -Skywalker 
-Slab -Slapstick -Sleek -Sleeper -Sleepwalker -Slick -Sligguth -Slipstream -Slither -Sludge -Slug -Sluggo -Sluk -Slyde -Smart Alec -Smartship Friday -Smasher -Smuggler -Smuggler II -Snowbird -Snowfall -Solara -Solarman -Solarr -Soldier X -Solitaire -Solo -Solomon O'Sullivan -Son of Satan -Songbird -Soulfire -Space Phantom -Space Turnip -Specialist -Spectra -Spectral -Speed -Speed Demon -Speedball -Speedo -Spellbinder -Spellcheck -Spencer Smythe -Sphinx -Sphinxor -Spider Doppelganger -Spider-Girl -Spider-Ham -Spider-Man -Spider-Slayer -Spider-Woman -Spidercide -Spike -Spike Freeman -Spinnerette -Spiral -Spirit of '76 -Spitfire -Spoilsport -Spoor -Spot -Sprite -Sputnik -Spyder -Spymaster -Spyne -Squidboy -Squirrel Girl -St. John Allerdyce -Stacy X -Stained Glass Scarlet -Stakar -Stallior -Stanley Stewart -Star Stalker -Star Thief -Star-Dancer -Star-Lord -Starbolt -Stardust -Starfox -Starhawk -Starlight -Starr the Slayer -Starshine -Starstreak -Stature -Steel Raven -Steel Serpent -Steel Spider -Stegron -Stellaris -Stem Cell -Stentor -Stephen Colbert -Stephen Strange -Steve Rogers -Steven Lang -Stevie Hunter -Stick -Stiletto -Stilt-Man -Stinger -Stingray -Stitch -Stone -Stonecutter -Stonewall -Storm -Stranger -Stratosfire -Straw Man -Strobe -Strong Guy -Strongarm -Stryfe -Stunner -Stuntmaster -Stygorr -Stygyro -Styx and Stone -Sub-Mariner -Sugar Man -Suicide -Sultan -Sun Girl -Sunder -Sundragon -Sunfire -Sunpyre -Sunset Bain -Sunspot -Sunstreak -Sunstroke -Sunturion -Super Rabbit -Super Sabre -Super-Adaptoid -Super-Nova -Super-Skrull -SuperPro -Supercharger -Superia -Supernalia -Suprema -Supreme Intelligence -Supremor -Surge -Surtur -Susan Richards -Susan Storm -Sushi -Svarog -Swarm -Sweetface -Swordsman -Sybil Dorn -Sybil Dvorak -Synch -T-Ray -Tabitha Smith -Tag -Tagak the Leopard Lord -Tailhook -Taj Nital -Talia Josephine Wagner -Talisman -Tamara Rahn -Tana Nile -Tantra -Tanya Anderssen -Tarantula -Tarot -Tartarus -Taskmaster -Tatterdemalion -Tattletale -Tattoo 
-Taurus -Techno -Tefral the Surveyor -Tempest -Tempo -Tempus -Temugin -Tenpin -Termagaira -Terminator -Terminatrix -Terminus -Terrax the Tamer -Terraxia -Terror -Tess-One -Tessa -Tether -Tethlam -Tex Dawson -Texas Twister -Thakos -Thane Ector -Thanos -The Amazing Tanwir Ahmed -The Angel -The Blank -The Destroyer -The Entity -The Grip -The Night Man -The Profile -The Russian -The Stepford Cuckoos -The Symbiote -The Wink -Thena -Theresa Cassidy -Thermo -Thin Man -Thing -Thinker -Thirty-Three -Thog -Thomas Halloway -Thor -Thor Girl -Thornn -Threnody -Thumbelina -Thunderball -Thunderbird -Thunderbolt -Thunderclap -Thunderfist -Thunderstrike -Thundra -Tiboro -Tiger Shark -Tigra -Timberius -Time Bomb -Timeshadow -Timeslip -Tinkerer -Titan -Titania -Titanium Man -Tito Bohusk -Toad -Toad-In-Waiting -Todd Arliss -Tom Cassidy -Tom Corsi -Tom Foster -Tom Thumb -Tomazooma -Tombstone -Tommy -Tommy Lightning -Tomorrow Man -Tony Stark -Topaz -Topspin -Torgo of Mekka -Torgo the Vampire -Toro -Torpedo -Torrent -Torso -Tower -Toxin -Trader -Trapper -Trapster -Tremolo -Trevor Fitzroy -Tri-Man -Triathlon -Trick Shot -Trioccula -Trip Monroe -Triton -Troll -Trump -Tuc -Tugun -Tumbler -Tundra -Turac -Turbo -Turner Century -Turner D. 
Century -Tusk -Tutinax the Mountain-Mover -Two-Gun Kid -Tyger Tiger -Typeface -Typhoid -Typhoid Mary -Typhon -Tyr -Tyrak -Tyrannosaur -Tyrannus -Tyrant -Tzabaoth -U-Go Girl -U-Man -USAgent -Uatu -Ulik -Ultimo -Ultimus -Ultra-Marine -Ultragirl -Ultron -Ulysses -Umar -Umbo -Uncle Ben Parker -Uni-Mind -Unicorn -Union Jack -Unseen -Unthinnk -Unus the Untouchable -Unuscione -Ursa Major -Urthona -Utgard-Loki -Vagabond -Vague -Vakume -Valentina Allegra de La Fontaine -Valerie Cooper -Valinor -Valkin -Valkyrie -Valtorr -Vamp -Vampire by Night -Vance Astro -Vance Astrovik -Vanguard -Vanisher -Vapor -Vargas -Varnae -Vashti -Vavavoom -Vector -Vegas -Veil -Vengeance -Venom -Venomm -Venus -Venus Dee Milo -Veritas -Vermin -Vertigo -Vesta -Vibraxas -Vibro -Victor Creed -Victor Mancha -Victor Strange -Victor von Doom -Victorius -Vidar -Vincente -Vindaloo -Vindicator -Viper -Virako -Virginia "Pepper" Potts -Virgo -Vishanti -Visimajoris -Vision -Vivisector -Vixen -Volcana -Volla -Volpan -Volstagg -Vulcan -Vulture -Wade Wilson -Wallflower -Walter Newell -Wanda Maximoff -War -War Eagle -War Machine -War V -Warbird -Warhawk -Warlock -Warpath -Warren III Worthington -Warrior Woman -Warstar -Warstrike -Warwolves -Washout -Wasp -Watcher -Water Wizard -Watoomb -Weapon X -Wendell Vaughn -Wendigo -Werewolf by Night -Western Kid -Whiplash -Whirlwind -Whistler -White Fang -White Pilgrim -White Queen -White Rabbit -White Tiger -Whiteout -Whizzer -Wiccan -Wicked -Widget -Wilbur Day -Wild Child -Wild Thing -Wildboys -Wildpride -Wildside -Will o' the Wisp -William Baker -William Stryker -Willie Lumpkin -Wilson Fisk -Wind Dancer -Wind Warrior -Windeagle -Windshear -Winky Man -Winter Soldier -Witchfire -Wiz Kid -Wizard -Wolf -Wolfsbane -Wolverine -Wonder Man -Wong -Woodgod -Worm -Wraith -Wrath -Wreckage -Wrecker -Wundarr the Aquarian -Wyatt Wingfoot -Wysper -X-23 -X-Cutioner -X-Man -X-Ray -X-Treme -Xandu -Xavin -Xemnu the Titan -Xemu -Xi'an Chi Xan -Xorn -Xorr the God-Jewel -Y'Garon -Yandroth 
-Yellow Claw -Yellowjacket -Yeti -Yith -Ymir -Yondu -Yrial -Yukio -Yukon Jack -Yuri Topolov -Yuriko Oyama -Zabu -Zach -Zaladane -Zarathos -Zarek -Zartra -Zebediah Killgrave -Zeitgeist -Zero -Zero-G -Zeus -Ziggy Pig -Zip-Zap -Zodiak -Zom -Zombie -Zuras -Zzzax -gen Harada -the Living Colossus It -the Living Darkness Null -the Renegade Watcher Aron -the Tomorrow Man Zarrko diff --git a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index 50e05d97985..96c52d9dc8e 100644 --- a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -74,10 +74,10 @@ public class NodeEnvironmentTests extends ESTestCase { } public void testNodeLockSingleEnvironment() throws IOException { - NodeEnvironment env = newNodeEnvironment(Settings.builder() - .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), 1).build()); - Settings settings = env.getSettings(); - List dataPaths = Environment.PATH_DATA_SETTING.get(env.getSettings()); + final Settings settings = buildEnvSettings(Settings.builder() + .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), 1).build()); + NodeEnvironment env = newNodeEnvironment(settings); + List dataPaths = Environment.PATH_DATA_SETTING.get(settings); try { // Reuse the same location and attempt to lock again @@ -121,9 +121,10 @@ public class NodeEnvironmentTests extends ESTestCase { } public void testNodeLockMultipleEnvironment() throws IOException { - final NodeEnvironment first = newNodeEnvironment(); - List dataPaths = Environment.PATH_DATA_SETTING.get(first.getSettings()); - NodeEnvironment second = new NodeEnvironment(first.getSettings(), new Environment(first.getSettings())); + final Settings settings = buildEnvSettings(Settings.EMPTY); + final NodeEnvironment first = newNodeEnvironment(settings); + List dataPaths = Environment.PATH_DATA_SETTING.get(settings); + 
NodeEnvironment second = new NodeEnvironment(settings, new Environment(settings)); assertEquals(first.nodeDataPaths().length, dataPaths.size()); assertEquals(second.nodeDataPaths().length, dataPaths.size()); for (int i = 0; i < dataPaths.size(); i++) { @@ -388,21 +389,20 @@ public class NodeEnvironmentTests extends ESTestCase { assertThat("index paths uses the regular template", env.indexPaths(index), equalTo(stringsToPaths(dataPaths, "nodes/0/indices/" + index.getUUID()))); - env.close(); - NodeEnvironment env2 = newNodeEnvironment(dataPaths, "/tmp", + IndexSettings s3 = new IndexSettings(s2.getIndexMetaData(), Settings.builder().put(NodeEnvironment.ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.getKey(), false).build()); - assertThat(env2.availableShardPaths(sid), equalTo(env2.availableShardPaths(sid))); - assertThat(env2.resolveCustomLocation(s2, sid), equalTo(PathUtils.get("/tmp/foo/" + index.getUUID() + "/0"))); + assertThat(env.availableShardPaths(sid), equalTo(env.availableShardPaths(sid))); + assertThat(env.resolveCustomLocation(s3, sid), equalTo(PathUtils.get("/tmp/foo/" + index.getUUID() + "/0"))); assertThat("shard paths with a custom data_path should contain only regular paths", - env2.availableShardPaths(sid), + env.availableShardPaths(sid), equalTo(stringsToPaths(dataPaths, "nodes/0/indices/" + index.getUUID() + "/0"))); assertThat("index paths uses the regular template", - env2.indexPaths(index), equalTo(stringsToPaths(dataPaths, "nodes/0/indices/" + index.getUUID()))); + env.indexPaths(index), equalTo(stringsToPaths(dataPaths, "nodes/0/indices/" + index.getUUID()))); - env2.close(); + env.close(); } public void testWhetherClusterFolderShouldBeUsed() throws Exception { @@ -495,13 +495,17 @@ public class NodeEnvironmentTests extends ESTestCase { @Override public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException { - Settings build = Settings.builder() - .put(settings) - .put(Environment.PATH_HOME_SETTING.getKey(), 
createTempDir().toAbsolutePath().toString()) - .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build(); + Settings build = buildEnvSettings(settings); return new NodeEnvironment(build, new Environment(build)); } + public Settings buildEnvSettings(Settings settings) { + return Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) + .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()) + .put(settings).build(); + } + public NodeEnvironment newNodeEnvironment(String[] dataPaths, Settings settings) throws IOException { Settings build = Settings.builder() .put(settings) diff --git a/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java b/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java index 749b1621e4d..f3433a9669a 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java @@ -140,7 +140,8 @@ public class ShardPathTests extends ESTestCase { Path[] paths = env.availableShardPaths(shardId); Path path = randomFrom(paths); ShardStateMetaData.FORMAT.write(new ShardStateMetaData(2, true, indexUUID, AllocationId.newInitializing()), path); - ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), indexSettings)); + ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, + IndexSettingsModule.newIndexSettings(shardId.getIndex(), indexSettings, nodeSettings)); boolean found = false; for (Path p : env.nodeDataPaths()) { if (p.equals(shardPath.getRootStatePath())) { diff --git a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java index 5637e937c1f..2dc95f8e9f1 100644 --- a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java +++ 
b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java @@ -54,13 +54,13 @@ public class InternalSettingsPreparerTests extends ESTestCase { public void testEmptySettings() { Settings settings = InternalSettingsPreparer.prepareSettings(Settings.EMPTY); - assertNotNull(settings.get("node.name")); // a name was set + assertNull(settings.get("node.name")); // a name was not set assertNotNull(settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey())); // a cluster name was set int size = settings.names().size(); Environment env = InternalSettingsPreparer.prepareEnvironment(baseEnvSettings, null); settings = env.settings(); - assertNotNull(settings.get("node.name")); // a name was set + assertNull(settings.get("node.name")); // a name was not set assertNotNull(settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey())); // a cluster name was set assertEquals(settings.toString(), size + 1 /* path.home is in the base settings */, settings.names().size()); String home = Environment.PATH_HOME_SETTING.get(baseEnvSettings); diff --git a/docs/reference/migration/migrate_5_0/settings.asciidoc b/docs/reference/migration/migrate_5_0/settings.asciidoc index da0a5776ad0..76ee65a6abe 100644 --- a/docs/reference/migration/migrate_5_0/settings.asciidoc +++ b/docs/reference/migration/migrate_5_0/settings.asciidoc @@ -28,6 +28,9 @@ The `name` setting has been removed and is replaced by `node.name`. Usage of The `node.add_id_to_custom_path` was renamed to `add_lock_id_to_custom_path`. +The default for the `node.name` settings is now the first 7 charachters of the node id, +which is in turn a randomly generated UUID. + The settings `node.mode` and `node.local` are removed. Local mode should be configured via `discovery.type: local` and `transport.type:local`. 
In order to disable _http_ please use `http.enabled: false` diff --git a/docs/reference/setup/important-settings.asciidoc b/docs/reference/setup/important-settings.asciidoc index e3be0b780c4..aecd205b613 100644 --- a/docs/reference/setup/important-settings.asciidoc +++ b/docs/reference/setup/important-settings.asciidoc @@ -70,9 +70,9 @@ environments, otherwise you might end up with nodes joining the wrong cluster. [[node.name]] === `node.name` -By default, Elasticsearch will randomly pick a descriptive `node.name` from a -list of around 3000 Marvel characters when your node starts up, but this also -means that the `node.name` will change the next time the node restarts. +By default, Elasticsearch will take the 7 first charachter of the randomly generated uuid used as the node id. +Note that the node id is persisted and does not change when a node restarts and therefore the default node name +will also not change. It is worth configuring a more meaningful name which will also have the advantage of persisting after restarting the node: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml index 7f724831bd8..e09da2a6557 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml @@ -164,7 +164,7 @@ (\d+ \s+) #always should return value since we filter out non data nodes by default [-\w.]+ \s+ \d+(\.\d+){3} \s+ - \w.* + [-\w.]+ \n )+ $/ diff --git a/test/framework/src/main/java/org/elasticsearch/node/NodeTests.java b/test/framework/src/main/java/org/elasticsearch/node/NodeTests.java new file mode 100644 index 00000000000..db8b6825ec8 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/node/NodeTests.java @@ -0,0 +1,58 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.node; + +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.InternalTestCluster; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; + +public class NodeTests extends ESTestCase { + + public void testNodeName() throws IOException { + final Path tempDir = createTempDir(); + final String name = randomBoolean() ? randomAsciiOfLength(10) : null; + Settings.Builder settings = Settings.builder() + .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), InternalTestCluster.clusterName("single-node-cluster", randomLong())) + .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) + .put(NetworkModule.HTTP_ENABLED.getKey(), false) + .put("discovery.type", "local") + .put("transport.type", "local") + .put(Node.NODE_DATA_SETTING.getKey(), true); + if (name != null) { + settings.put(Node.NODE_NAME_SETTING.getKey(), name); + } + try (Node node = new MockNode(settings.build(), Collections.emptyList())) { + final Settings nodeSettings = randomBoolean() ? 
node.settings() : node.getEnvironment().settings(); + if (name == null) { + assertThat(Node.NODE_NAME_SETTING.get(nodeSettings), equalTo(node.getNodeEnvironment().nodeId().substring(0, 7))); + } else { + assertThat(Node.NODE_NAME_SETTING.get(nodeSettings), equalTo(name)); + } + } + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java b/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java index 789eb693f7f..dcb91ed441e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java +++ b/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java @@ -53,17 +53,21 @@ public class IndexSettingsModule extends AbstractModule { } public static IndexSettings newIndexSettings(Index index, Settings settings, Setting... setting) { + return newIndexSettings(index, settings, Settings.EMPTY, setting); + } + + public static IndexSettings newIndexSettings(Index index, Settings indexSetting, Settings nodeSettings, Setting... 
setting) { Settings build = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(settings) + .put(indexSetting) .build(); IndexMetaData metaData = IndexMetaData.builder(index.getName()).settings(build).build(); Set> settingSet = new HashSet<>(IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); if (setting.length > 0) { settingSet.addAll(Arrays.asList(setting)); } - return new IndexSettings(metaData, Settings.EMPTY, (idx) -> Regex.simpleMatch(idx, metaData.getIndex().getName()), new IndexScopedSettings(Settings.EMPTY, settingSet)); + return new IndexSettings(metaData, nodeSettings, (idx) -> Regex.simpleMatch(idx, metaData.getIndex().getName()), new IndexScopedSettings(Settings.EMPTY, settingSet)); } public static IndexSettings newIndexSettings(Index index, Settings settings, IndexScopedSettings indexScopedSettings) { From 03fbc91816381aaf856327dd02e8bf4b2ba3eb8d Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Sun, 24 Jul 2016 09:02:30 +0200 Subject: [PATCH 49/93] allow for a `-` in a node name --- .../test/cat.allocation/10_basic.yaml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml index e09da2a6557..5f0646139b5 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml @@ -34,7 +34,7 @@ (\d+ \s+)? #no value from client nodes [-\w.]+ \s+ \d+(\.\d+){3} \s+ - \w.* + [-\w]+ \n )+ $/ @@ -61,7 +61,7 @@ (\d+ \s+) #always should return value since we filter out non data nodes by default [-\w.]+ \s+ \d+(\.\d+){3} \s+ - \w.* + [-\w]+ \n )+ ( @@ -90,7 +90,7 @@ (\d+ \s+)? 
#no value from client nodes [-\w.]+ \s+ \d+(\.\d+){3} \s+ - \w.* + [-\w]+ \n ) $/ @@ -124,7 +124,7 @@ (\d+ \s+)? #no value from client nodes [-\w.]+ \s+ \d+(\.\d+){3} \s+ - \w.* + [-\w]+ \n )+ ( @@ -164,7 +164,7 @@ (\d+ \s+) #always should return value since we filter out non data nodes by default [-\w.]+ \s+ \d+(\.\d+){3} \s+ - [-\w.]+ + [-\w]+ \n )+ $/ @@ -181,7 +181,7 @@ $body: | /^ ( \d* \s+ - \w.* + [-\w]+ \n )+ $/ @@ -199,7 +199,7 @@ \n ( \s+\d* \s+ - \w.* + [-\w]+ \n )+ $/ @@ -224,7 +224,7 @@ (\d+ \s+) #always should return value since we filter out non data nodes by default [-\w.]+ \s+ \d+(\.\d+){3} \s+ - \w.* + [-\w]+ \n )+ $/ From b62ec1d300f24b22e1e5deacef3bb0ae206724d1 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Mon, 25 Jul 2016 16:18:04 +0200 Subject: [PATCH 50/93] Remove TODO about Timeout in Azure In #15950 #15080 #16084 we added the support of TimeOut for Requests with a default client`setTimeoutIntervalInMs`. So we can remove this useless todo which was added for only one method. Closes #18617. 
--- .../cloud/azure/storage/AzureStorageServiceImpl.java | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java index 17ff0780a50..4e5dfb3efd5 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java @@ -27,15 +27,12 @@ import com.microsoft.azure.storage.blob.CloudBlobClient; import com.microsoft.azure.storage.blob.CloudBlobContainer; import com.microsoft.azure.storage.blob.CloudBlockBlob; import com.microsoft.azure.storage.blob.ListBlobItem; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.repositories.RepositoryException; @@ -163,13 +160,6 @@ public class AzureStorageServiceImpl extends AbstractComponent implements AzureS public void removeContainer(String account, LocationMode mode, String container) throws URISyntaxException, StorageException { CloudBlobClient client = this.getSelectedClient(account, mode); CloudBlobContainer blobContainer = client.getContainerReference(container); - // TODO Should we set some timeout and retry options? 
- /* - BlobRequestOptions options = new BlobRequestOptions(); - options.setTimeoutIntervalInMs(1000); - options.setRetryPolicyFactory(new RetryNoRetry()); - blobContainer.deleteIfExists(options, null); - */ logger.trace("removing container [{}]", container); blobContainer.deleteIfExists(); } From 33461a843219ca897fee606670e3ed2dc0c88748 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Mon, 25 Jul 2016 16:20:59 +0200 Subject: [PATCH 51/93] Rename FieldDataFieldsContext and FieldDataFieldsFetchSubPhase in DocValueFieldsContext and DocValueFieldsFetchSubPhase This change renames the package org.elasticsearch.search.fetch.fielddata in org.elasticsearch.search.fetch.docvalues and renames the FieldData* classes in DocValue*. This is a follow up of the renaming that happened in #18943 --- .../index/query/InnerHitBuilder.java | 12 +++++----- .../elasticsearch/search/SearchModule.java | 4 ++-- .../elasticsearch/search/SearchService.java | 13 +++++----- .../tophits/TopHitsAggregatorFactory.java | 24 +++++++++---------- .../search/fetch/FetchSubPhaseContext.java | 4 +++- .../DocValueFieldsContext.java} | 18 +++++++------- .../DocValueFieldsFetchSubPhase.java} | 21 ++++++++-------- 7 files changed, 48 insertions(+), 48 deletions(-) rename core/src/main/java/org/elasticsearch/search/fetch/{fielddata/FieldDataFieldsContext.java => docvalues/DocValueFieldsContext.java} (71%) rename core/src/main/java/org/elasticsearch/search/fetch/{fielddata/FieldDataFieldsFetchSubPhase.java => docvalues/DocValueFieldsFetchSubPhase.java} (76%) diff --git a/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java b/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java index 04b8e071f5d..3dd7bd0bcc8 100644 --- a/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java @@ -34,8 +34,8 @@ import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.SearchScript; 
import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField; -import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext; -import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase; +import org.elasticsearch.search.fetch.docvalues.DocValueFieldsContext; +import org.elasticsearch.search.fetch.docvalues.DocValueFieldsFetchSubPhase; import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.HighlightBuilder; @@ -574,12 +574,12 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl } } if (docValueFields != null) { - FieldDataFieldsContext fieldDataFieldsContext = innerHitsContext - .getFetchSubPhaseContext(FieldDataFieldsFetchSubPhase.CONTEXT_FACTORY); + DocValueFieldsContext docValueFieldsContext = innerHitsContext + .getFetchSubPhaseContext(DocValueFieldsFetchSubPhase.CONTEXT_FACTORY); for (String field : docValueFields) { - fieldDataFieldsContext.add(new FieldDataFieldsContext.FieldDataField(field)); + docValueFieldsContext.add(new DocValueFieldsContext.DocValueField(field)); } - fieldDataFieldsContext.setHitExecutionNeeded(true); + docValueFieldsContext.setHitExecutionNeeded(true); } if (scriptFields != null) { for (ScriptField field : scriptFields) { diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index 06f1ab1f1ad..c6ce4a040f3 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -246,7 +246,7 @@ import org.elasticsearch.search.controller.SearchPhaseController; import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.explain.ExplainFetchSubPhase; -import 
org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase; +import org.elasticsearch.search.fetch.docvalues.DocValueFieldsFetchSubPhase; import org.elasticsearch.search.fetch.matchedqueries.MatchedQueriesFetchSubPhase; import org.elasticsearch.search.fetch.parent.ParentFieldSubFetchPhase; import org.elasticsearch.search.fetch.script.ScriptFieldsFetchSubPhase; @@ -792,7 +792,7 @@ public class SearchModule extends AbstractModule { private void registerFetchSubPhases(List plugins) { registerFetchSubPhase(new ExplainFetchSubPhase()); - registerFetchSubPhase(new FieldDataFieldsFetchSubPhase()); + registerFetchSubPhase(new DocValueFieldsFetchSubPhase()); registerFetchSubPhase(new ScriptFieldsFetchSubPhase()); registerFetchSubPhase(new FetchSourceSubPhase()); registerFetchSubPhase(new VersionFetchSubPhase()); diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 976e0cb7939..bfcfcb9d4c8 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -39,7 +39,6 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ConcurrentMapLong; -import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; @@ -67,9 +66,9 @@ import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.QueryFetchSearchResult; import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchRequest; -import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext; -import 
org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext.FieldDataField; -import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase; +import org.elasticsearch.search.fetch.docvalues.DocValueFieldsContext; +import org.elasticsearch.search.fetch.docvalues.DocValueFieldsContext.DocValueField; +import org.elasticsearch.search.fetch.docvalues.DocValueFieldsFetchSubPhase; import org.elasticsearch.search.fetch.script.ScriptFieldsContext.ScriptField; import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.internal.DefaultSearchContext; @@ -726,11 +725,11 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv context.fetchSourceContext(source.fetchSource()); } if (source.docValueFields() != null) { - FieldDataFieldsContext fieldDataFieldsContext = context.getFetchSubPhaseContext(FieldDataFieldsFetchSubPhase.CONTEXT_FACTORY); + DocValueFieldsContext docValuesFieldsContext = context.getFetchSubPhaseContext(DocValueFieldsFetchSubPhase.CONTEXT_FACTORY); for (String field : source.docValueFields()) { - fieldDataFieldsContext.add(new FieldDataField(field)); + docValuesFieldsContext.add(new DocValueField(field)); } - fieldDataFieldsContext.setHitExecutionNeeded(true); + docValuesFieldsContext.setHitExecutionNeeded(true); } if (source.highlighter() != null) { HighlightBuilder highlightBuilder = source.highlighter(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorFactory.java index 65a8c24eb08..a3614889bdf 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorFactory.java @@ -28,9 +28,9 @@ import org.elasticsearch.search.aggregations.InternalAggregation.Type; import 
org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField; -import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext; -import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext.FieldDataField; -import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase; +import org.elasticsearch.search.fetch.docvalues.DocValueFieldsContext; +import org.elasticsearch.search.fetch.docvalues.DocValueFieldsContext.DocValueField; +import org.elasticsearch.search.fetch.docvalues.DocValueFieldsFetchSubPhase; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.internal.SubSearchContext; @@ -54,12 +54,12 @@ public class TopHitsAggregatorFactory extends AggregatorFactory> sorts; private final HighlightBuilder highlightBuilder; private final List fieldNames; - private final List fieldDataFields; + private final List docValueFields; private final Set scriptFields; private final FetchSourceContext fetchSourceContext; public TopHitsAggregatorFactory(String name, Type type, int from, int size, boolean explain, boolean version, boolean trackScores, - List> sorts, HighlightBuilder highlightBuilder, List fieldNames, List fieldDataFields, + List> sorts, HighlightBuilder highlightBuilder, List fieldNames, List docValueFields, Set scriptFields, FetchSourceContext fetchSourceContext, AggregationContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactories, Map metaData) throws IOException { super(name, type, context, parent, subFactories, metaData); @@ -71,7 +71,7 @@ public class TopHitsAggregatorFactory extends AggregatorFactory fields = new ArrayList<>(); + private List fields = new ArrayList<>(); - public FieldDataFieldsContext() { + public DocValueFieldsContext() { } - public 
void add(FieldDataField field) { + public void add(DocValueField field) { this.fields.add(field); } - public List fields() { + public List fields() { return this.fields; } } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/docvalues/DocValueFieldsFetchSubPhase.java similarity index 76% rename from core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsFetchSubPhase.java rename to core/src/main/java/org/elasticsearch/search/fetch/docvalues/DocValueFieldsFetchSubPhase.java index 81907d0b906..9946920bc62 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/fielddata/FieldDataFieldsFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/docvalues/DocValueFieldsFetchSubPhase.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.search.fetch.fielddata; +package org.elasticsearch.search.fetch.docvalues; import org.elasticsearch.index.fielddata.AtomicFieldData; import org.elasticsearch.index.fielddata.ScriptDocValues; @@ -30,24 +30,23 @@ import java.util.ArrayList; import java.util.HashMap; /** - * Query sub phase which pulls data from field data (using the cache if - * available, building it if not). 
+ * Query sub phase which pulls data from doc values * - * Specifying {@code "fielddata_fields": ["field1", "field2"]} + * Specifying {@code "docvalue_fields": ["field1", "field2"]} */ -public final class FieldDataFieldsFetchSubPhase implements FetchSubPhase { +public final class DocValueFieldsFetchSubPhase implements FetchSubPhase { - public static final String[] NAMES = {"fielddata_fields", "fielddataFields"}; - public static final ContextFactory CONTEXT_FACTORY = new ContextFactory() { + public static final String NAME = "docvalue_fields"; + public static final ContextFactory CONTEXT_FACTORY = new ContextFactory() { @Override public String getName() { - return NAMES[0]; + return NAME; } @Override - public FieldDataFieldsContext newContextInstance() { - return new FieldDataFieldsContext(); + public DocValueFieldsContext newContextInstance() { + return new DocValueFieldsContext(); } }; @@ -56,7 +55,7 @@ public final class FieldDataFieldsFetchSubPhase implements FetchSubPhase { if (context.getFetchSubPhaseContext(CONTEXT_FACTORY).hitExecutionNeeded() == false) { return; } - for (FieldDataFieldsContext.FieldDataField field : context.getFetchSubPhaseContext(CONTEXT_FACTORY).fields()) { + for (DocValueFieldsContext.DocValueField field : context.getFetchSubPhaseContext(CONTEXT_FACTORY).fields()) { if (hitContext.hit().fieldsOrNull() == null) { hitContext.hit().fields(new HashMap<>(2)); } From f745c969493ef2f3bbb136092b01fa06a356bb92 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Fri, 8 Jul 2016 16:02:29 +0200 Subject: [PATCH 52/93] Clean up more messy tests After #13834 many tests that used Groovy scripts (for good or bad reason) in their tests have been moved in the lang-groovy module and the issue #13837 has been created to track these messy tests in order to clean them up. This commit moves more tests back in core, removes the dependency on Groovy, changes the scripts in order to use the mocked script engine, and change the tests to integration tests. 
--- .../resources/checkstyle_suppressions.xml | 6 - .../action/bulk/BulkWithUpdatesIT.java | 359 +++++++++++++----- .../AggregationTestScriptsPlugin.java | 38 +- .../aggregations/bucket/DoubleTermsIT.java | 97 +++-- .../aggregations/bucket/LongTermsIT.java | 81 ++-- .../aggregations/bucket/MinDocCountIT.java | 77 +++- .../aggregations/bucket/StringTermsIT.java | 116 ++++-- .../aggregations/metrics/ExtendedStatsIT.java | 60 ++- .../search/aggregations/metrics/MaxIT.java | 63 ++- .../search/aggregations/metrics/MinIT.java | 67 ++-- .../search/aggregations/metrics/StatsIT.java | 70 ++-- .../search/functionscore/FunctionScoreIT.java | 88 +++-- .../messy/tests/package-info.java | 8 - .../script/MockScriptEngine.java | 9 +- 14 files changed, 806 insertions(+), 333 deletions(-) rename modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java => core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java (71%) rename modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java => core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java (92%) rename modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/LongTermsTests.java => core/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java (94%) rename modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java => core/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java (85%) rename modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StringTermsTests.java => core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java (93%) rename modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java => core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java (92%) rename modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MaxTests.java => 
core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java (85%) rename modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinTests.java => core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java (86%) rename modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StatsTests.java => core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java (89%) rename modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java => core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java (76%) diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index de5fe339a30..b629abbe493 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -1106,18 +1106,12 @@ - - - - - - diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java similarity index 71% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java rename to core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java index cda5ba2161e..5007d5c830f 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java @@ -17,13 +17,9 @@ * under the License. 
*/ -package org.elasticsearch.messy.tests; +package org.elasticsearch.action.bulk; import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.action.bulk.BulkItemResponse; -import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; @@ -38,17 +34,21 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.script.groovy.GroovyPlugin; +import org.elasticsearch.script.ScriptException; import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; +import java.util.Map; import java.util.concurrent.CyclicBarrier; +import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.script.ScriptService.ScriptType; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; @@ -58,10 +58,56 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -public class BulkTests extends ESIntegTestCase { +public class BulkWithUpdatesIT extends ESIntegTestCase { + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder() + 
.put(super.nodeSettings(nodeOrdinal)) + .put("script.default_lang", CustomScriptPlugin.NAME) + .build(); + } + @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(CustomScriptPlugin.class); + } + + public static class CustomScriptPlugin extends MockScriptPlugin { + + @Override + @SuppressWarnings("unchecked") + protected Map, Object>> pluginScripts() { + Map, Object>> scripts = new HashMap<>(); + + scripts.put("ctx._source.field += 1", vars -> srcScript(vars, source -> { + Integer field = (Integer) source.get("field"); + return source.replace("field", field + 1); + })); + + scripts.put("ctx._source.counter += 1", vars -> srcScript(vars, source -> { + Integer counter = (Integer) source.get("counter"); + return source.replace("counter", counter + 1); + })); + + scripts.put("ctx._source.field2 = 'value2'", vars -> srcScript(vars, source -> source.replace("field2", "value2"))); + + scripts.put("throw script exception on unknown var", vars -> { + throw new ScriptException("message", null, Collections.emptyList(), "exception on unknown var", CustomScriptPlugin.NAME); + }); + + scripts.put("ctx.op = \"none\"", vars -> ((Map) vars.get("ctx")).put("op", "none")); + scripts.put("ctx.op = \"delete\"", vars -> ((Map) vars.get("ctx")).put("op", "delete")); + return scripts; + } + + @SuppressWarnings("unchecked") + static Object srcScript(Map vars, Function, Object> f) { + Map ctx = (Map) vars.get("ctx"); + + Map source = (Map) ctx.get("_source"); + return f.apply(source); + } } public void testBulkUpdateSimple() throws Exception { @@ -82,25 +128,26 @@ public class BulkTests extends ESIntegTestCase { assertThat(bulkItemResponse.getIndex(), equalTo("test")); } + final Script script = new Script("ctx._source.field += 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + bulkResponse = client().prepareBulk() - .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("1") - 
.setScript(new Script("ctx._source.field += 1", ScriptService.ScriptType.INLINE, null, null))) - .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("2") - .setScript(new Script("ctx._source.field += 1", ScriptService.ScriptType.INLINE, null, null)).setRetryOnConflict(3)) + .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("1").setScript(script)) + .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("2").setScript(script).setRetryOnConflict(3)) .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("3") - .setDoc(jsonBuilder().startObject().field("field1", "test").endObject())).execute().actionGet(); + .setDoc(jsonBuilder().startObject().field("field1", "test").endObject())) + .get(); assertThat(bulkResponse.hasFailures(), equalTo(false)); assertThat(bulkResponse.getItems().length, equalTo(3)); for (BulkItemResponse bulkItemResponse : bulkResponse) { assertThat(bulkItemResponse.getIndex(), equalTo("test")); } - assertThat(((UpdateResponse) bulkResponse.getItems()[0].getResponse()).getId(), equalTo("1")); - assertThat(((UpdateResponse) bulkResponse.getItems()[0].getResponse()).getVersion(), equalTo(2L)); - assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getId(), equalTo("2")); - assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getVersion(), equalTo(2L)); - assertThat(((UpdateResponse) bulkResponse.getItems()[2].getResponse()).getId(), equalTo("3")); - assertThat(((UpdateResponse) bulkResponse.getItems()[2].getResponse()).getVersion(), equalTo(2L)); + assertThat(bulkResponse.getItems()[0].getResponse().getId(), equalTo("1")); + assertThat(bulkResponse.getItems()[0].getResponse().getVersion(), equalTo(2L)); + assertThat(bulkResponse.getItems()[1].getResponse().getId(), equalTo("2")); + assertThat(bulkResponse.getItems()[1].getResponse().getVersion(), equalTo(2L)); + 
assertThat(bulkResponse.getItems()[2].getResponse().getId(), equalTo("3")); + assertThat(bulkResponse.getItems()[2].getResponse().getVersion(), equalTo(2L)); GetResponse getResponse = client().prepareGet().setIndex("test").setType("type1").setId("1").setFields("field").execute() .actionGet(); @@ -120,26 +167,23 @@ public class BulkTests extends ESIntegTestCase { bulkResponse = client() .prepareBulk() - .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("6") - .setScript(new Script("ctx._source.field += 1", ScriptService.ScriptType.INLINE, null, null)) + .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("6").setScript(script) .setUpsert(jsonBuilder().startObject().field("field", 0).endObject())) - .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("7") - .setScript(new Script("ctx._source.field += 1", ScriptService.ScriptType.INLINE, null, null))) - .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("2") - .setScript(new Script("ctx._source.field += 1", ScriptService.ScriptType.INLINE, null, null))).execute() - .actionGet(); + .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("7").setScript(script)) + .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("2").setScript(script)) + .get(); assertThat(bulkResponse.hasFailures(), equalTo(true)); assertThat(bulkResponse.getItems().length, equalTo(3)); - assertThat(((UpdateResponse) bulkResponse.getItems()[0].getResponse()).getId(), equalTo("6")); - assertThat(((UpdateResponse) bulkResponse.getItems()[0].getResponse()).getVersion(), equalTo(1L)); + assertThat(bulkResponse.getItems()[0].getResponse().getId(), equalTo("6")); + assertThat(bulkResponse.getItems()[0].getResponse().getVersion(), equalTo(1L)); assertThat(bulkResponse.getItems()[1].getResponse(), nullValue()); assertThat(bulkResponse.getItems()[1].getFailure().getIndex(), equalTo("test")); 
assertThat(bulkResponse.getItems()[1].getFailure().getId(), equalTo("7")); assertThat(bulkResponse.getItems()[1].getFailure().getMessage(), containsString("document missing")); - assertThat(((UpdateResponse) bulkResponse.getItems()[2].getResponse()).getId(), equalTo("2")); - assertThat(((UpdateResponse) bulkResponse.getItems()[2].getResponse()).getIndex(), equalTo("test")); - assertThat(((UpdateResponse) bulkResponse.getItems()[2].getResponse()).getVersion(), equalTo(3L)); + assertThat(bulkResponse.getItems()[2].getResponse().getId(), equalTo("2")); + assertThat(bulkResponse.getItems()[2].getResponse().getIndex(), equalTo("test")); + assertThat(bulkResponse.getItems()[2].getResponse().getVersion(), equalTo(3L)); getResponse = client().prepareGet().setIndex("test").setType("type1").setId("6").setFields("field").execute().actionGet(); assertThat(getResponse.isExists(), equalTo(true)); @@ -164,11 +208,11 @@ public class BulkTests extends ESIntegTestCase { .add(client().prepareIndex("test", "type", "1").setSource("field", "2")).get(); assertTrue(((IndexResponse) bulkResponse.getItems()[0].getResponse()).isCreated()); - assertThat(((IndexResponse) bulkResponse.getItems()[0].getResponse()).getVersion(), equalTo(1L)); + assertThat(bulkResponse.getItems()[0].getResponse().getVersion(), equalTo(1L)); assertTrue(((IndexResponse) bulkResponse.getItems()[1].getResponse()).isCreated()); - assertThat(((IndexResponse) bulkResponse.getItems()[1].getResponse()).getVersion(), equalTo(1L)); + assertThat(bulkResponse.getItems()[1].getResponse().getVersion(), equalTo(1L)); assertFalse(((IndexResponse) bulkResponse.getItems()[2].getResponse()).isCreated()); - assertThat(((IndexResponse) bulkResponse.getItems()[2].getResponse()).getVersion(), equalTo(2L)); + assertThat(bulkResponse.getItems()[2].getResponse().getVersion(), equalTo(2L)); bulkResponse = client().prepareBulk() .add(client().prepareUpdate("test", "type", "1").setVersion(4L).setDoc("field", "2")) @@ -176,29 +220,37 @@ public 
class BulkTests extends ESIntegTestCase { .add(client().prepareUpdate("test", "type", "1").setVersion(2L).setDoc("field", "3")).get(); assertThat(bulkResponse.getItems()[0].getFailureMessage(), containsString("version conflict")); - assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getVersion(), equalTo(2L)); - assertThat(((UpdateResponse) bulkResponse.getItems()[2].getResponse()).getVersion(), equalTo(3L)); + assertThat(bulkResponse.getItems()[1].getResponse().getVersion(), equalTo(2L)); + assertThat(bulkResponse.getItems()[2].getResponse().getVersion(), equalTo(3L)); bulkResponse = client().prepareBulk() - .add(client().prepareIndex("test", "type", "e1").setSource("field", "1").setVersion(10).setVersionType(VersionType.EXTERNAL)) - .add(client().prepareIndex("test", "type", "e2").setSource("field", "1").setVersion(10).setVersionType(VersionType.EXTERNAL)) - .add(client().prepareIndex("test", "type", "e1").setSource("field", "2").setVersion(12).setVersionType(VersionType.EXTERNAL)).get(); + .add(client().prepareIndex("test", "type", "e1") + .setSource("field", "1").setVersion(10).setVersionType(VersionType.EXTERNAL)) + .add(client().prepareIndex("test", "type", "e2") + .setSource("field", "1").setVersion(10).setVersionType(VersionType.EXTERNAL)) + .add(client().prepareIndex("test", "type", "e1") + .setSource("field", "2").setVersion(12).setVersionType(VersionType.EXTERNAL)) + .get(); assertTrue(((IndexResponse) bulkResponse.getItems()[0].getResponse()).isCreated()); - assertThat(((IndexResponse) bulkResponse.getItems()[0].getResponse()).getVersion(), equalTo(10L)); + assertThat(bulkResponse.getItems()[0].getResponse().getVersion(), equalTo(10L)); assertTrue(((IndexResponse) bulkResponse.getItems()[1].getResponse()).isCreated()); - assertThat(((IndexResponse) bulkResponse.getItems()[1].getResponse()).getVersion(), equalTo(10L)); + assertThat(bulkResponse.getItems()[1].getResponse().getVersion(), equalTo(10L)); assertFalse(((IndexResponse) 
bulkResponse.getItems()[2].getResponse()).isCreated()); - assertThat(((IndexResponse) bulkResponse.getItems()[2].getResponse()).getVersion(), equalTo(12L)); + assertThat(bulkResponse.getItems()[2].getResponse().getVersion(), equalTo(12L)); bulkResponse = client().prepareBulk() - .add(client().prepareUpdate("test", "type", "e1").setDoc("field", "2").setVersion(10)) // INTERNAL - .add(client().prepareUpdate("test", "type", "e1").setDoc("field", "3").setVersion(20).setVersionType(VersionType.FORCE)) - .add(client().prepareUpdate("test", "type", "e1").setDoc("field", "4").setVersion(20).setVersionType(VersionType.INTERNAL)).get(); + .add(client().prepareUpdate("test", "type", "e1") + .setDoc("field", "2").setVersion(10)) // INTERNAL + .add(client().prepareUpdate("test", "type", "e1") + .setDoc("field", "3").setVersion(20).setVersionType(VersionType.FORCE)) + .add(client().prepareUpdate("test", "type", "e1") + .setDoc("field", "4").setVersion(20).setVersionType(VersionType.INTERNAL)) + .get(); assertThat(bulkResponse.getItems()[0].getFailureMessage(), containsString("version conflict")); - assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getVersion(), equalTo(20L)); - assertThat(((UpdateResponse) bulkResponse.getItems()[2].getResponse()).getVersion(), equalTo(21L)); + assertThat(bulkResponse.getItems()[1].getResponse().getVersion(), equalTo(20L)); + assertThat(bulkResponse.getItems()[2].getResponse().getVersion(), equalTo(21L)); } public void testBulkUpdateMalformedScripts() throws Exception { @@ -215,12 +267,12 @@ public class BulkTests extends ESIntegTestCase { assertThat(bulkResponse.getItems().length, equalTo(3)); bulkResponse = client().prepareBulk() - .add(client().prepareUpdate().setIndex("test").setType("type1").setId("1") - .setScript(new Script("ctx._source.field += a", ScriptService.ScriptType.INLINE, null, null)).setFields("field")) - .add(client().prepareUpdate().setIndex("test").setType("type1").setId("2") - .setScript(new 
Script("ctx._source.field += 1", ScriptService.ScriptType.INLINE, null, null)).setFields("field")) - .add(client().prepareUpdate().setIndex("test").setType("type1").setId("3") - .setScript(new Script("ctx._source.field += a", ScriptService.ScriptType.INLINE, null, null)).setFields("field")) + .add(client().prepareUpdate().setIndex("test").setType("type1").setId("1").setFields("field") + .setScript(new Script("throw script exception on unknown var", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .add(client().prepareUpdate().setIndex("test").setType("type1").setId("2").setFields("field") + .setScript(new Script("ctx._source.field += 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .add(client().prepareUpdate().setIndex("test").setType("type1").setId("3").setFields("field") + .setScript(new Script("throw script exception on unknown var", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) .execute().actionGet(); assertThat(bulkResponse.hasFailures(), equalTo(true)); @@ -229,10 +281,9 @@ public class BulkTests extends ESIntegTestCase { assertThat(bulkResponse.getItems()[0].getFailure().getMessage(), containsString("failed to execute script")); assertThat(bulkResponse.getItems()[0].getResponse(), nullValue()); - assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getId(), equalTo("2")); - assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getVersion(), equalTo(2L)); - assertThat(((Integer) ((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getGetResult().field("field").getValue()), - equalTo(2)); + assertThat(bulkResponse.getItems()[1].getResponse().getId(), equalTo("2")); + assertThat(bulkResponse.getItems()[1].getResponse().getVersion(), equalTo(2L)); + assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getGetResult().field("field").getValue(), equalTo(2)); assertThat(bulkResponse.getItems()[1].getFailure(), nullValue()); assertThat(bulkResponse.getItems()[2].getFailure().getId(), 
equalTo("3")); @@ -248,17 +299,19 @@ public class BulkTests extends ESIntegTestCase { if (numDocs % 2 == 1) { numDocs++; // this test needs an even num of docs } - logger.info("Bulk-Indexing {} docs", numDocs); + + final Script script = new Script("ctx._source.counter += 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + BulkRequestBuilder builder = client().prepareBulk(); for (int i = 0; i < numDocs; i++) { builder.add( client().prepareUpdate() - .setIndex("test").setType("type1").setId(Integer.toString(i)) - .setScript(new Script("ctx._source.counter += 1", ScriptService.ScriptType.INLINE, null, null)).setFields("counter") - .setUpsert(jsonBuilder().startObject().field("counter", 1).endObject())); + .setIndex("test").setType("type1").setId(Integer.toString(i)).setFields("counter") + .setScript(script) + .setUpsert(jsonBuilder().startObject().field("counter", 1).endObject())); } - BulkResponse response = builder.execute().actionGet(); + BulkResponse response = builder.get(); assertThat(response.hasFailures(), equalTo(false)); assertThat(response.getItems().length, equalTo(numDocs)); for (int i = 0; i < numDocs; i++) { @@ -267,10 +320,9 @@ public class BulkTests extends ESIntegTestCase { assertThat(response.getItems()[i].getIndex(), equalTo("test")); assertThat(response.getItems()[i].getType(), equalTo("type1")); assertThat(response.getItems()[i].getOpType(), equalTo("update")); - assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getId(), equalTo(Integer.toString(i))); - assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getVersion(), equalTo(1L)); - assertThat(((Integer) ((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().field("counter").getValue()), - equalTo(1)); + assertThat(response.getItems()[i].getResponse().getId(), equalTo(Integer.toString(i))); + assertThat(response.getItems()[i].getResponse().getVersion(), equalTo(1L)); + assertThat(((UpdateResponse) 
response.getItems()[i].getResponse()).getGetResult().field("counter").getValue(), equalTo(1)); for (int j = 0; j < 5; j++) { GetResponse getResponse = client().prepareGet("test", "type1", Integer.toString(i)).setFields("counter").execute() @@ -286,7 +338,7 @@ public class BulkTests extends ESIntegTestCase { UpdateRequestBuilder updateBuilder = client().prepareUpdate().setIndex("test").setType("type1").setId(Integer.toString(i)) .setFields("counter"); if (i % 2 == 0) { - updateBuilder.setScript(new Script("ctx._source.counter += 1", ScriptService.ScriptType.INLINE, null, null)); + updateBuilder.setScript(script); } else { updateBuilder.setDoc(jsonBuilder().startObject().field("counter", 2).endObject()); } @@ -306,17 +358,15 @@ public class BulkTests extends ESIntegTestCase { assertThat(response.getItems()[i].getIndex(), equalTo("test")); assertThat(response.getItems()[i].getType(), equalTo("type1")); assertThat(response.getItems()[i].getOpType(), equalTo("update")); - assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getId(), equalTo(Integer.toString(i))); - assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getVersion(), equalTo(2L)); - assertThat(((Integer) ((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().field("counter").getValue()), - equalTo(2)); + assertThat(response.getItems()[i].getResponse().getId(), equalTo(Integer.toString(i))); + assertThat(response.getItems()[i].getResponse().getVersion(), equalTo(2L)); + assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().field("counter").getValue(), equalTo(2)); } builder = client().prepareBulk(); int maxDocs = numDocs / 2 + numDocs; for (int i = (numDocs / 2); i < maxDocs; i++) { - builder.add(client().prepareUpdate().setIndex("test").setType("type1").setId(Integer.toString(i)) - .setScript(new Script("ctx._source.counter += 1", ScriptService.ScriptType.INLINE, null, null))); + 
builder.add(client().prepareUpdate().setIndex("test").setType("type1").setId(Integer.toString(i)).setScript(script)); } response = builder.execute().actionGet(); assertThat(response.hasFailures(), equalTo(true)); @@ -338,7 +388,7 @@ public class BulkTests extends ESIntegTestCase { builder = client().prepareBulk(); for (int i = 0; i < numDocs; i++) { builder.add(client().prepareUpdate().setIndex("test").setType("type1").setId(Integer.toString(i)) - .setScript(new Script("ctx.op = \"none\"", ScriptService.ScriptType.INLINE, null, null))); + .setScript(new Script("ctx.op = \"none\"", ScriptType.INLINE, CustomScriptPlugin.NAME, null))); } response = builder.execute().actionGet(); assertThat(response.buildFailureMessage(), response.hasFailures(), equalTo(false)); @@ -354,7 +404,7 @@ public class BulkTests extends ESIntegTestCase { builder = client().prepareBulk(); for (int i = 0; i < numDocs; i++) { builder.add(client().prepareUpdate().setIndex("test").setType("type1").setId(Integer.toString(i)) - .setScript(new Script("ctx.op = \"delete\"", ScriptService.ScriptType.INLINE, null, null))); + .setScript(new Script("ctx.op = \"delete\"", ScriptType.INLINE, CustomScriptPlugin.NAME, null))); } response = builder.execute().actionGet(); assertThat(response.hasFailures(), equalTo(false)); @@ -416,11 +466,38 @@ public class BulkTests extends ESIntegTestCase { BulkRequestBuilder builder = client().prepareBulk(); - byte[] addParent = new BytesArray("{\"index\" : { \"_index\" : \"test\", \"_type\" : \"parent\", \"_id\" : \"parent1\"}}\n" + - "{\"field1\" : \"value1\"}\n").array(); + // It's important to use JSON parsing here and request objects: issue 3444 is related to incomplete option parsing + byte[] addParent = new BytesArray( + "{" + + " \"index\" : {" + + " \"_index\" : \"test\"," + + " \"_type\" : \"parent\"," + + " \"_id\" : \"parent1\"" + + " }" + + "}" + + "\n" + + "{" + + " \"field1\" : \"value1\"" + + "}" + + "\n").array(); - byte[] addChild = new BytesArray("{ 
\"update\" : { \"_index\" : \"test\", \"_type\" : \"child\", \"_id\" : \"child1\", \"parent\" : \"parent1\"}}\n" + - "{\"doc\" : { \"field1\" : \"value1\"}, \"doc_as_upsert\" : \"true\"}\n").array(); + byte[] addChild = new BytesArray( + "{" + + " \"update\" : {" + + " \"_index\" : \"test\"," + + " \"_type\" : \"child\"," + + " \"_id\" : \"child1\"," + + " \"parent\" : \"parent1\"" + + " }" + + "}" + + "\n" + + "{" + + " \"doc\" : {" + + " \"field1\" : \"value1\"" + + " }," + + " \"doc_as_upsert\" : \"true\"" + + "}" + + "\n").array(); builder.add(addParent, 0, addParent.length); builder.add(addChild, 0, addChild.length); @@ -452,14 +529,57 @@ public class BulkTests extends ESIntegTestCase { BulkRequestBuilder builder = client().prepareBulk(); - byte[] addParent = new BytesArray("{\"index\" : { \"_index\" : \"test\", \"_type\" : \"parent\", \"_id\" : \"parent1\"}}\n" + - "{\"field1\" : \"value1\"}\n").array(); + byte[] addParent = new BytesArray( + "{" + + " \"index\" : {" + + " \"_index\" : \"test\"," + + " \"_type\" : \"parent\"," + + " \"_id\" : \"parent1\"" + + " }" + + "}" + + "\n" + + "{" + + " \"field1\" : \"value1\"" + + "}" + + "\n").array(); - byte[] addChild1 = new BytesArray("{\"update\" : { \"_id\" : \"child1\", \"_type\" : \"child\", \"_index\" : \"test\", \"parent\" : \"parent1\"} }\n" + - "{ \"script\" : {\"inline\" : \"ctx._source.field2 = 'value2'\"}, \"upsert\" : {\"field1\" : \"value1\"}}\n").array(); + byte[] addChild1 = new BytesArray( + "{" + + " \"update\" : {" + + " \"_index\" : \"test\"," + + " \"_type\" : \"child\"," + + " \"_id\" : \"child1\"," + + " \"parent\" : \"parent1\"" + + " }" + + "}" + + "\n" + + "{" + + " \"script\" : {" + + " \"inline\" : \"ctx._source.field2 = 'value2'\"" + + " }," + + " \"upsert\" : {" + + " \"field1\" : \"value1'\"" + + " }" + + "}" + + "\n").array(); - byte[] addChild2 = new BytesArray("{\"update\" : { \"_id\" : \"child1\", \"_type\" : \"child\", \"_index\" : \"test\", \"parent\" : \"parent1\"} }\n" + - "{ 
\"script\" : \"ctx._source.field2 = 'value2'\", \"upsert\" : {\"field1\" : \"value1\"}}\n").array(); + byte[] addChild2 = new BytesArray( + "{" + + " \"update\" : {" + + " \"_index\" : \"test\"," + + " \"_type\" : \"child\"," + + " \"_id\" : \"child1\"," + + " \"parent\" : \"parent1\"" + + " }" + + "}" + + "\n" + + "{" + + " \"script\" : \"ctx._source.field2 = 'value2'\"," + + " \"upsert\" : {" + + " \"field1\" : \"value1'\"" + + " }" + + "}" + + "\n").array(); builder.add(addParent, 0, addParent.length); builder.add(addChild1, 0, addChild1.length); @@ -490,15 +610,48 @@ public class BulkTests extends ESIntegTestCase { BulkRequestBuilder builder = client().prepareBulk(); - byte[] addParent = new BytesArray("{\"index\" : { \"_index\" : \"test\", \"_type\" : \"parent\", \"_id\" : \"parent1\"}}\n" - + "{\"field1\" : \"value1\"}\n").array(); + byte[] addParent = new BytesArray( + "{" + + " \"index\" : {" + + " \"_index\" : \"test\"," + + " \"_type\" : \"parent\"," + + " \"_id\" : \"parent1\"" + + " }" + + "}" + + "\n" + + "{" + + " \"field1\" : \"value1\"" + + "}" + + "\n").array(); byte[] addChildOK = new BytesArray( - "{\"index\" : { \"_id\" : \"child1\", \"_type\" : \"child\", \"_index\" : \"test\", \"parent\" : \"parent1\"} }\n" - + "{ \"field1\" : \"value1\"}\n").array(); + "{" + + " \"index\" : {" + + " \"_index\" : \"test\"," + + " \"_type\" : \"child\"," + + " \"_id\" : \"child1\"," + + " \"parent\" : \"parent1\"" + + " }" + + "}" + + "\n" + + "{" + + " \"field1\" : \"value1\"" + + "}" + + "\n").array(); + byte[] addChildMissingRouting = new BytesArray( - "{\"index\" : { \"_id\" : \"child2\", \"_type\" : \"child\", \"_index\" : \"test\"} }\n" + "{ \"field1\" : \"value1\"}\n") - .array(); + "{" + + " \"index\" : {" + + " \"_index\" : \"test\"," + + " \"_type\" : \"child\"," + + " \"_id\" : \"child1\"" + + " }" + + "}" + + "\n" + + "{" + + " \"field1\" : \"value1\"" + + "}" + + "\n").array(); builder.add(addParent, 0, addParent.length); builder.add(addChildOK, 0, 
addChildOK.length); @@ -523,19 +676,16 @@ public class BulkTests extends ESIntegTestCase { for (int i = 0; i < responses.length; i++) { final int threadID = i; - threads[threadID] = new Thread(new Runnable() { - @Override - public void run() { - try { - cyclicBarrier.await(); - } catch (Exception e) { - return; - } - BulkRequestBuilder requestBuilder = client().prepareBulk(); - requestBuilder.add(client().prepareUpdate("test", "type", "1").setVersion(1).setDoc("field", threadID)); - responses[threadID] = requestBuilder.get(); - + threads[threadID] = new Thread(() -> { + try { + cyclicBarrier.await(); + } catch (Exception e) { + return; } + BulkRequestBuilder requestBuilder = client().prepareBulk(); + requestBuilder.add(client().prepareUpdate("test", "type", "1").setVersion(1).setDoc("field", threadID)); + responses[threadID] = requestBuilder.get(); + }); threads[threadID].start(); @@ -613,7 +763,6 @@ public class BulkTests extends ESIntegTestCase { assertThat(bulkItemResponse.getItems()[5].getOpType(), is("delete")); } - private static String indexOrAlias() { return randomBoolean() ? 
"test" : "alias"; } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java index 4a0369a4019..2c320288edf 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java @@ -49,15 +49,10 @@ public class AggregationTestScriptsPlugin extends MockScriptPlugin { protected Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); - scripts.put("20 - _value", vars -> { - double value = (double) vars.get("_value"); - return 20.0d - value; - }); - - scripts.put("_value - 1", vars -> { - double value = (double) vars.get("_value"); - return value - 1.0d; - }); + scripts.put("20 - _value", vars -> 20.0d - (double) vars.get("_value")); + scripts.put("_value - 1", vars -> (double) vars.get("_value") - 1); + scripts.put("_value + 1", vars -> (double) vars.get("_value") + 1); + scripts.put("_value * -1", vars -> (double) vars.get("_value") * -1); scripts.put("_value - dec", vars -> { double value = (double) vars.get("_value"); @@ -65,6 +60,12 @@ public class AggregationTestScriptsPlugin extends MockScriptPlugin { return value - dec; }); + scripts.put("_value + inc", vars -> { + double value = (double) vars.get("_value"); + int inc = (int) vars.get("inc"); + return value + inc; + }); + scripts.put("doc['value'].value", vars -> { Map doc = (Map) vars.get("doc"); return doc.get("value"); @@ -77,6 +78,13 @@ public class AggregationTestScriptsPlugin extends MockScriptPlugin { return value.getValue() - dec; }); + scripts.put("doc['value'].value + inc", vars -> { + int inc = (int) vars.get("inc"); + Map doc = (Map) vars.get("doc"); + ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get("value"); + return value.getValue() + inc; + }); + scripts.put("doc['values'].values", vars -> { Map doc 
= (Map) vars.get("doc"); return doc.get("values"); @@ -94,7 +102,17 @@ public class AggregationTestScriptsPlugin extends MockScriptPlugin { return res; }); - scripts.put("_value * -1", vars -> (double) vars.get("_value") * -1); + scripts.put("[ doc['value'].value, doc['value'].value - dec ]", vars -> { + Long a = ((ScriptDocValues.Longs) scripts.get("doc['value'].value").apply(vars)).getValue(); + Long b = (Long) scripts.get("doc['value'].value - dec").apply(vars); + return new Long[]{a, b}; + }); + + scripts.put("[ doc['value'].value, doc['value'].value + inc ]", vars -> { + Long a = ((ScriptDocValues.Longs) scripts.get("doc['value'].value").apply(vars)).getValue(); + Long b = (Long) scripts.get("doc['value'].value + inc").apply(vars); + return new Long[]{a, b}; + }); return scripts; } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java similarity index 92% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java index adbc66c2202..6bcb07ef9d8 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java @@ -16,18 +16,18 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.ScoreAccessor; import org.elasticsearch.script.Script; -import org.elasticsearch.script.groovy.GroovyPlugin; +import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; -import org.elasticsearch.search.aggregations.bucket.AbstractTermsTestCase; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; @@ -50,10 +50,13 @@ import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; +import static org.elasticsearch.script.ScriptService.ScriptType; import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.extendedStats; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; @@ -69,15 +72,47 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static 
org.hamcrest.core.IsNull.notNullValue; -/** - * - */ @ESIntegTestCase.SuiteScopeTestCase -public class DoubleTermsTests extends AbstractTermsTestCase { +public class DoubleTermsIT extends AbstractTermsTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(CustomScriptPlugin.class); + } + + public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { + + @Override + @SuppressWarnings("unchecked") + protected Map, Object>> pluginScripts() { + Map, Object>> scripts = super.pluginScripts(); + + scripts.put("(long) (_value / 1000 + 1)", vars -> (long) ((double) vars.get("_value") / 1000 + 1)); + + scripts.put("doc['" + MULTI_VALUED_FIELD_NAME + "']", vars -> { + Map doc = (Map) vars.get("doc"); + return doc.get(MULTI_VALUED_FIELD_NAME); + }); + + scripts.put("doc['" + MULTI_VALUED_FIELD_NAME + "'].value", vars -> { + Map doc = (Map) vars.get("doc"); + ScriptDocValues.Doubles value = (ScriptDocValues.Doubles) doc.get(MULTI_VALUED_FIELD_NAME); + return value.getValue(); + }); + + scripts.put("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", vars -> { + Map doc = (Map) vars.get("doc"); + ScriptDocValues.Doubles value = (ScriptDocValues.Doubles) doc.get(SINGLE_VALUED_FIELD_NAME); + return value.getValue(); + }); + + scripts.put("ceil(_score.doubleValue()/3)", vars -> { + ScoreAccessor score = (ScoreAccessor) vars.get("_score"); + return Math.ceil(score.doubleValue() / 3); + }); + + return scripts; + } } private static final int NUM_DOCS = 5; // TODO: randomize the size? 
@@ -415,8 +450,8 @@ public class DoubleTermsTests extends AbstractTermsTestCase { .addAggregation(terms("terms") .field(SINGLE_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("_value + 1"))) - .execute().actionGet(); + .script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .get(); assertSearchResponse(response); @@ -468,8 +503,8 @@ public class DoubleTermsTests extends AbstractTermsTestCase { .addAggregation(terms("terms") .field(MULTI_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("_value + 1"))) - .execute().actionGet(); + .script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .get(); assertSearchResponse(response); @@ -497,8 +532,8 @@ public class DoubleTermsTests extends AbstractTermsTestCase { .addAggregation(terms("terms") .field(MULTI_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("(long) (_value / 1000 + 1)"))) - .execute().actionGet(); + .script(new Script("(long) (_value / 1000 + 1)", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .get(); assertSearchResponse(response); @@ -537,8 +572,11 @@ public class DoubleTermsTests extends AbstractTermsTestCase { .prepareSearch("idx") .setTypes("type") .addAggregation( - terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).script( - new Script("doc['" + MULTI_VALUED_FIELD_NAME + "'].value"))).execute().actionGet(); + terms("terms") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "'].value", ScriptType.INLINE, + CustomScriptPlugin.NAME, null))) + .get(); assertSearchResponse(response); @@ -561,8 +599,11 @@ public class DoubleTermsTests extends AbstractTermsTestCase { .prepareSearch("idx") .setTypes("type") .addAggregation( - terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).script( - new Script("doc['" 
+ MULTI_VALUED_FIELD_NAME + "']"))).execute().actionGet(); + terms("terms") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "']", ScriptType.INLINE, + CustomScriptPlugin.NAME, null))) + .get(); assertSearchResponse(response); @@ -1035,14 +1076,20 @@ public class DoubleTermsTests extends AbstractTermsTestCase { } public void testScriptScore() { + Script scoringScript = + new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", ScriptType.INLINE, CustomScriptPlugin .NAME, null); + + Script aggregationScript = new Script("ceil(_score.doubleValue()/3)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client() .prepareSearch("idx") .setTypes("type") - .setQuery( - functionScoreQuery(ScoreFunctionBuilders.scriptFunction(new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value")))) + .setQuery(functionScoreQuery(scriptFunction(scoringScript))) .addAggregation( - terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).script( - new Script("ceil(_score.doubleValue()/3)"))).execute().actionGet(); + terms("terms") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(aggregationScript)) + .get(); assertSearchResponse(response); @@ -1087,7 +1134,9 @@ public class DoubleTermsTests extends AbstractTermsTestCase { public void testSingleValuedFieldOrderedByThreeCriteria() throws Exception { double[] expectedKeys = new double[] { 2, 1, 4, 5, 3, 6, 7 }; - assertMultiSortResponse(expectedKeys, Terms.Order.count(false), Terms.Order.aggregation("sum_d", false), Terms.Order.aggregation("avg_l", false)); + assertMultiSortResponse(expectedKeys, Terms.Order.count(false), + Terms.Order.aggregation("sum_d", false), + Terms.Order.aggregation("avg_l", false)); } public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound() throws Exception { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/LongTermsTests.java 
b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java similarity index 94% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/LongTermsTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java index 7bb1ed6fd65..6d5e190b542 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/LongTermsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java @@ -16,17 +16,17 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; -import org.elasticsearch.script.groovy.GroovyPlugin; +import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; -import org.elasticsearch.search.aggregations.bucket.AbstractTermsTestCase; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; @@ -49,9 +49,11 @@ import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.script.ScriptService.ScriptType; import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import 
static org.elasticsearch.search.aggregations.AggregationBuilders.extendedStats; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; @@ -67,15 +69,36 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsNull.notNullValue; -/** - * - */ @ESIntegTestCase.SuiteScopeTestCase -public class LongTermsTests extends AbstractTermsTestCase { +public class LongTermsIT extends AbstractTermsTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(CustomScriptPlugin.class); + } + + public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { + + @Override + @SuppressWarnings("unchecked") + protected Map, Object>> pluginScripts() { + Map, Object>> scripts = super.pluginScripts(); + + scripts.put("floor(_value / 1000 + 1)", vars -> Math.floor((double) vars.get("_value") / 1000 + 1)); + + scripts.put("doc['" + MULTI_VALUED_FIELD_NAME + "']", vars -> { + Map doc = (Map) vars.get("doc"); + return doc.get(MULTI_VALUED_FIELD_NAME); + }); + + scripts.put("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", vars -> { + Map doc = (Map) vars.get("doc"); + ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get(SINGLE_VALUED_FIELD_NAME); + return value.getValue(); + }); + + return scripts; + } } private static final int NUM_DOCS = 5; // TODO randomize the size? 
@@ -418,8 +441,8 @@ public class LongTermsTests extends AbstractTermsTestCase { .addAggregation(terms("terms") .field(SINGLE_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("_value + 1"))) - .execute().actionGet(); + .script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .get(); assertSearchResponse(response); @@ -471,8 +494,8 @@ public class LongTermsTests extends AbstractTermsTestCase { .addAggregation(terms("terms") .field(MULTI_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("_value - 1"))) - .execute().actionGet(); + .script(new Script("_value - 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .get(); assertSearchResponse(response); @@ -500,8 +523,8 @@ public class LongTermsTests extends AbstractTermsTestCase { .addAggregation(terms("terms") .field(MULTI_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("floor(_value / 1000 + 1)"))) - .execute().actionGet(); + .script(new Script("floor(_value / 1000 + 1)", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .get(); assertSearchResponse(response); @@ -536,12 +559,13 @@ public class LongTermsTests extends AbstractTermsTestCase { */ public void testScriptSingleValue() throws Exception { + Script script = new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation(terms("terms") .collectMode(randomFrom(SubAggCollectionMode.values())) - .script( - new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value"))) - .execute().actionGet(); + .script(script)) + .get(); assertSearchResponse(response); @@ -561,12 +585,13 @@ public class LongTermsTests extends AbstractTermsTestCase { } public void testScriptMultiValued() throws Exception { + Script script = new Script("doc['" + MULTI_VALUED_FIELD_NAME + 
"']", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation(terms("terms") .collectMode(randomFrom(SubAggCollectionMode.values())) - .script( - new Script("doc['" + MULTI_VALUED_FIELD_NAME + "']"))) - .execute().actionGet(); + .script(script)) + .get(); assertSearchResponse(response); @@ -711,9 +736,11 @@ public class LongTermsTests extends AbstractTermsTestCase { .field(SINGLE_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())) .order(Terms.Order.aggregation("avg_i", asc)) - .subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME)).subAggregation(terms("subTerms").field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - ).execute().actionGet(); + .subAggregation( + avg("avg_i").field(SINGLE_VALUED_FIELD_NAME)) + .subAggregation( + terms("subTerms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))) + ).get(); assertSearchResponse(response); @@ -753,8 +780,8 @@ public class LongTermsTests extends AbstractTermsTestCase { .field("num_tag") .collectMode(randomFrom(SubAggCollectionMode.values())) .order(Terms.Order.aggregation("filter", asc)) -.subAggregation(filter("filter", QueryBuilders.matchAllQuery())) - ).execute().actionGet(); + .subAggregation(filter("filter", QueryBuilders.matchAllQuery())) + ).get(); assertSearchResponse(response); @@ -1064,7 +1091,9 @@ public class LongTermsTests extends AbstractTermsTestCase { public void testSingleValuedFieldOrderedByThreeCriteria() throws Exception { long[] expectedKeys = new long[] { 2, 1, 4, 5, 3, 6, 7 }; - assertMultiSortResponse(expectedKeys, Terms.Order.count(false), Terms.Order.aggregation("sum_d", false), Terms.Order.aggregation("avg_l", false)); + assertMultiSortResponse(expectedKeys, Terms.Order.count(false), + Terms.Order.aggregation("sum_d", false), + Terms.Order.aggregation("avg_l", false)); } public void 
testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound() throws Exception { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java similarity index 85% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java index 640c00b291d..012df7bfbff 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.bucket; import com.carrotsearch.hppc.LongHashSet; import com.carrotsearch.hppc.LongSet; @@ -25,12 +25,13 @@ import com.carrotsearch.randomizedtesting.generators.RandomStrings; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.groovy.GroovyPlugin; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; -import org.elasticsearch.search.aggregations.bucket.AbstractTermsTestCase; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; @@ -44,10 +45,13 @@ import org.joda.time.format.DateTimeFormat; 
import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; +import java.util.Map; import java.util.Set; +import java.util.function.Function; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -58,18 +62,45 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; - @ESIntegTestCase.SuiteScopeTestCase -public class MinDocCountTests extends AbstractTermsTestCase { +public class MinDocCountIT extends AbstractTermsTestCase { + + private static final QueryBuilder QUERY = QueryBuilders.termQuery("match", true); + private static int cardinality; @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(CustomScriptPlugin.class); } - private static final QueryBuilder QUERY = QueryBuilders.termQuery("match", true); + public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { - private static int cardinality; + @Override + @SuppressWarnings("unchecked") + protected Map, Object>> pluginScripts() { + Map, Object>> scripts = new HashMap<>(); + + scripts.put("doc['d'].values", vars -> { + Map doc = (Map) vars.get("doc"); + ScriptDocValues.Doubles value = (ScriptDocValues.Doubles) doc.get("d"); + return value.getValues(); + }); + + scripts.put("doc['l'].values", vars -> { + Map doc = (Map) vars.get("doc"); + ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get("l"); + return value.getValues(); + }); + + scripts.put("doc['s'].values", vars -> { + Map doc = (Map) vars.get("doc"); + ScriptDocValues.Strings value = (ScriptDocValues.Strings) doc.get("s"); + return value.getValues(); + }); + + return scripts; + } + } @Override public void 
setupSuiteScopeCluster() throws Exception { @@ -90,7 +121,8 @@ public class MinDocCountTests extends AbstractTermsTestCase { longTerm = randomInt(cardinality * 2); } while (!longTerms.add(longTerm)); double doubleTerm = longTerm * Math.PI; - String dateTerm = DateTimeFormat.forPattern("yyyy-MM-dd").print(new DateTime(2014, 1, ((int) longTerm % 20) + 1, 0, 0, DateTimeZone.UTC)); + String dateTerm = DateTimeFormat.forPattern("yyyy-MM-dd") + .print(new DateTime(2014, 1, ((int) longTerm % 20) + 1, 0, 0, DateTimeZone.UTC)); final int frequency = randomBoolean() ? 1 : randomIntBetween(2, 20); for (int j = 0; j < frequency; ++j) { indexRequests.add(client().prepareIndex("idx", "type").setSource(jsonBuilder() @@ -119,7 +151,8 @@ public class MinDocCountTests extends AbstractTermsTestCase { YES { @Override TermsAggregationBuilder apply(TermsAggregationBuilder builder, String field) { - return builder.script(new org.elasticsearch.script.Script("doc['" + field + "'].values")); + return builder.script(new org.elasticsearch.script.Script("doc['" + field + "'].values", ScriptService.ScriptType.INLINE, + CustomScriptPlugin.NAME, null)); } }; abstract TermsAggregationBuilder apply(TermsAggregationBuilder builder, String field); @@ -272,7 +305,7 @@ public class MinDocCountTests extends AbstractTermsTestCase { testMinDocCountOnTerms(field, script, order, null, true); } - private void testMinDocCountOnTerms(String field, Script script, Terms.Order order, String include, boolean retryOnFailure) throws Exception { + private void testMinDocCountOnTerms(String field, Script script, Terms.Order order, String include, boolean retry) throws Exception { // all terms final SearchResponse allTermsResponse = client().prepareSearch("idx").setTypes("type") .setSize(0) @@ -307,7 +340,7 @@ public class MinDocCountTests extends AbstractTermsTestCase { assertAllSuccessful(response); assertSubset(allTerms, (Terms) response.getAggregations().get("terms"), minDocCount, size, include); } catch 
(AssertionError ae) { - if (!retryOnFailure) { + if (!retry) { throw ae; } logger.info("test failed. trying to see if it recovers after 1m.", ae); @@ -380,8 +413,13 @@ public class MinDocCountTests extends AbstractTermsTestCase { final SearchResponse allResponse = client().prepareSearch("idx").setTypes("type") .setSize(0) .setQuery(QUERY) - .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).order(order).minDocCount(0)) - .execute().actionGet(); + .addAggregation( + dateHistogram("histo") + .field("date") + .dateHistogramInterval(DateHistogramInterval.DAY) + .order(order) + .minDocCount(0)) + .get(); final Histogram allHisto = allResponse.getAggregations().get("histo"); @@ -389,9 +427,14 @@ public class MinDocCountTests extends AbstractTermsTestCase { final SearchResponse response = client().prepareSearch("idx").setTypes("type") .setSize(0) .setQuery(QUERY) - .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).order(order).minDocCount(minDocCount)) - .execute().actionGet(); - assertSubset(allHisto, (Histogram) response.getAggregations().get("histo"), minDocCount); + .addAggregation( + dateHistogram("histo") + .field("date") + .dateHistogramInterval(DateHistogramInterval.DAY) + .order(order) + .minDocCount(minDocCount)) + .get(); + assertSubset(allHisto, response.getAggregations().get("histo"), minDocCount); } } } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StringTermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java similarity index 93% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StringTermsTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java index b098db154da..ba3b9380504 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StringTermsTests.java +++ 
b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.bucket; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.ElasticsearchException; @@ -24,14 +24,14 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; +import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.mapper.internal.IndexFieldMapper; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; -import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.search.aggregations.AggregationExecutionException; +import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; -import org.elasticsearch.search.aggregations.bucket.AbstractTermsTestCase; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; @@ -56,10 +56,12 @@ import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.script.ScriptService.ScriptType; import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static 
org.elasticsearch.search.aggregations.AggregationBuilders.count; import static org.elasticsearch.search.aggregations.AggregationBuilders.extendedStats; @@ -76,18 +78,41 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsNull.notNullValue; import static org.hamcrest.core.IsNull.nullValue; -/** - * - */ @ESIntegTestCase.SuiteScopeTestCase -public class StringTermsTests extends AbstractTermsTestCase { +public class StringTermsIT extends AbstractTermsTestCase { + private static final String SINGLE_VALUED_FIELD_NAME = "s_value"; private static final String MULTI_VALUED_FIELD_NAME = "s_values"; private static Map> expectedMultiSortBuckets; @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(CustomScriptPlugin.class); + } + + public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { + + @Override + @SuppressWarnings("unchecked") + protected Map, Object>> pluginScripts() { + Map, Object>> scripts = super.pluginScripts(); + + scripts.put("'foo_' + _value", vars -> "foo_" + (String) vars.get("_value")); + scripts.put("_value.substring(0,3)", vars -> ((String) vars.get("_value")).substring(0, 3)); + + scripts.put("doc['" + MULTI_VALUED_FIELD_NAME + "']", vars -> { + Map doc = (Map) vars.get("doc"); + return doc.get(MULTI_VALUED_FIELD_NAME); + }); + + scripts.put("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", vars -> { + Map doc = (Map) vars.get("doc"); + ScriptDocValues.Strings value = (ScriptDocValues.Strings) doc.get(SINGLE_VALUED_FIELD_NAME); + return value.getValue(); + }); + + return scripts; + } } @Override @@ -435,8 +460,11 @@ public class StringTermsTests extends AbstractTermsTestCase { .prepareSearch("idx") .setTypes("high_card_type") .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME).size(20) - .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.term(true))) // we need 
to sort by terms cause we're checking the first 20 values + terms("terms") + .executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME).size(20) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(Terms.Order.term(true))) // we need to sort by terms cause we're checking the first 20 values .execute().actionGet(); assertSearchResponse(response); @@ -542,9 +570,12 @@ public class StringTermsTests extends AbstractTermsTestCase { .prepareSearch("idx") .setTypes("type") .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).script(new Script("'foo_' + _value"))).execute() - .actionGet(); + terms("terms") + .executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script("'foo_' + _value", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .get(); assertSearchResponse(response); @@ -566,9 +597,12 @@ public class StringTermsTests extends AbstractTermsTestCase { .prepareSearch("idx") .setTypes("type") .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).script(new Script("_value.substring(0,3)"))) - .execute().actionGet(); + terms("terms") + .executionHint(randomExecutionHint()) + .field(MULTI_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script("_value.substring(0,3)", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .get(); assertSearchResponse(response); @@ -615,8 +649,11 @@ public class StringTermsTests extends AbstractTermsTestCase { .prepareSearch("idx") .setTypes("type") .addAggregation( - terms("terms").executionHint(randomExecutionHint()).script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "']")) - .collectMode(randomFrom(SubAggCollectionMode.values()))).execute().actionGet(); + 
terms("terms") + .executionHint(randomExecutionHint()) + .script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "']", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .collectMode(randomFrom(SubAggCollectionMode.values()))) + .get(); assertSearchResponse(response); @@ -642,9 +679,12 @@ public class StringTermsTests extends AbstractTermsTestCase { .prepareSearch("idx") .setTypes("type") .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).script(new Script("'foo_' + _value"))).execute() - .actionGet(); + terms("terms") + .executionHint(randomExecutionHint()) + .field(MULTI_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script("'foo_' + _value", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .get(); assertSearchResponse(response); @@ -678,12 +718,17 @@ public class StringTermsTests extends AbstractTermsTestCase { */ public void testScriptSingleValue() throws Exception { + Script script = new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client() .prepareSearch("idx") .setTypes("type") .addAggregation( - terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).executionHint(randomExecutionHint()) - .script(new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value"))).execute().actionGet(); + terms("terms") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .executionHint(randomExecutionHint()) + .script(script)) + .get(); assertSearchResponse(response); @@ -701,12 +746,17 @@ public class StringTermsTests extends AbstractTermsTestCase { } public void testScriptSingleValueExplicitSingleValue() throws Exception { + Script script = new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client() .prepareSearch("idx") 
.setTypes("type") .addAggregation( - terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).executionHint(randomExecutionHint()) - .script(new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value"))).execute().actionGet(); + terms("terms") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .executionHint(randomExecutionHint()) + .script(script)) + .get(); assertSearchResponse(response); @@ -728,8 +778,11 @@ public class StringTermsTests extends AbstractTermsTestCase { .prepareSearch("idx") .setTypes("type") .addAggregation( - terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).executionHint(randomExecutionHint()) - .script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "']"))).execute().actionGet(); + terms("terms") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .executionHint(randomExecutionHint()) + .script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "']", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .get(); assertSearchResponse(response); @@ -821,7 +874,11 @@ public class StringTermsTests extends AbstractTermsTestCase { .prepareSearch("empty_bucket_idx") .setQuery(matchAllQuery()) .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1L).minDocCount(0).subAggregation(terms("terms").field("value"))) + histogram("histo") + .field(SINGLE_VALUED_FIELD_NAME) + .interval(1L) + .minDocCount(0) + .subAggregation(terms("terms").field("value"))) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L)); @@ -1143,7 +1200,8 @@ public class StringTermsTests extends AbstractTermsTestCase { .subAggregation(terms("values").field("i").collectMode(randomFrom(SubAggCollectionMode.values())))) .execute().actionGet(); - fail("Expected search to fail when trying to sort terms aggregation by sug-aggregation which is not of a metrics or single-bucket type"); + fail("Expected search to fail when trying to sort terms aggregation by sug-aggregation " + + "which is 
not of a metrics or single-bucket type"); } catch (ElasticsearchException e) { // expected diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java similarity index 92% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java index d7fc77c9ccb..6f4f891ea65 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java @@ -16,20 +16,19 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.groovy.GroovyPlugin; +import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.missing.Missing; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order; -import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase; import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats.Bounds; @@ -53,13 +52,11 @@ import static org.hamcrest.Matchers.is; import static 
org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; -/** - * - */ -public class ExtendedStatsTests extends AbstractNumericTestCase { +public class ExtendedStatsIT extends AbstractNumericTestCase { + @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(AggregationTestScriptsPlugin.class); } private static double stdDev(int... vals) { @@ -298,7 +295,11 @@ public class ExtendedStatsTests extends AbstractNumericTestCase { double sigma = randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(extendedStats("stats").field("value").script(new Script("_value + 1")).sigma(sigma)) + .addAggregation( + extendedStats("stats") + .field("value") + .script(new Script("_value + 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, null)) + .sigma(sigma)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -325,7 +326,9 @@ public class ExtendedStatsTests extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - extendedStats("stats").field("value").script(new Script("_value + inc", ScriptType.INLINE, null, params)) + extendedStats("stats") + .field("value") + .script(new Script("_value + inc", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) .sigma(sigma)) .execute().actionGet(); @@ -374,7 +377,11 @@ public class ExtendedStatsTests extends AbstractNumericTestCase { double sigma = randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(extendedStats("stats").field("values").script(new Script("_value - 1")).sigma(sigma)) + .addAggregation( + extendedStats("stats") + .field("values") + .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, null)) + 
.sigma(sigma)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -401,9 +408,11 @@ public class ExtendedStatsTests extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - extendedStats("stats").field("values").script(new Script("_value - dec", ScriptType.INLINE, null, params)) + extendedStats("stats") + .field("values") + .script(new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) .sigma(sigma)) - .execute().actionGet(); + .get(); assertHitCount(searchResponse, 10); @@ -426,7 +435,10 @@ public class ExtendedStatsTests extends AbstractNumericTestCase { double sigma = randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(extendedStats("stats").script(new Script("doc['value'].value")).sigma(sigma)) + .addAggregation( + extendedStats("stats") + .script(new Script("doc['value'].value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, null)) + .sigma(sigma)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -449,11 +461,16 @@ public class ExtendedStatsTests extends AbstractNumericTestCase { public void testScriptSingleValuedWithParams() throws Exception { Map params = new HashMap<>(); params.put("inc", 1); + + Script script = new Script("doc['value'].value + inc", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + double sigma = randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - extendedStats("stats").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)).sigma(sigma)) + extendedStats("stats") + .script(script) + .sigma(sigma)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -477,7 +494,10 @@ public class ExtendedStatsTests extends AbstractNumericTestCase { double sigma = 
randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(extendedStats("stats").script(new Script("doc['values'].values")).sigma(sigma)) + .addAggregation( + extendedStats("stats") + .script(new Script("doc['values'].values", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, null)) + .sigma(sigma)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -500,12 +520,16 @@ public class ExtendedStatsTests extends AbstractNumericTestCase { public void testScriptMultiValuedWithParams() throws Exception { Map params = new HashMap<>(); params.put("dec", 1); + + Script script = new Script("[ doc['value'].value, doc['value'].value - dec ]", ScriptType.INLINE, + AggregationTestScriptsPlugin.NAME, params); + double sigma = randomDouble() * randomIntBetween(1, 10); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - extendedStats("stats").script( - new Script("[ doc['value'].value, doc['value'].value - dec ]", ScriptType.INLINE, null, params)) + extendedStats("stats") + .script(script) .sigma(sigma)) .execute().actionGet(); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MaxTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java similarity index 85% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MaxTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java index 22af6dd486e..d4ac8835ee0 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MaxTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java @@ -16,26 +16,27 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.groovy.GroovyPlugin; +import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order; -import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase; import org.elasticsearch.search.aggregations.metrics.max.Max; + import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import static java.util.Collections.emptyMap; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; @@ -50,10 +51,10 @@ import static org.hamcrest.Matchers.notNullValue; /** * */ -public class MaxTests extends AbstractNumericTestCase { +public class MaxIT extends AbstractNumericTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(AggregationTestScriptsPlugin.class); } @Override @@ -161,7 +162,10 @@ public class MaxTests extends AbstractNumericTestCase { public void testSingleValuedFieldWithValueScript() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(max("max").field("value").script(new Script("_value + 1"))) + 
.addAggregation( + max("max") + .field("value") + .script(new Script("_value + 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -178,8 +182,11 @@ public class MaxTests extends AbstractNumericTestCase { params.put("inc", 1); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(max("max").field("value").script(new Script("_value + inc", ScriptType.INLINE, null, params))) - .execute().actionGet(); + .addAggregation( + max("max") + .field("value") + .script(new Script("_value + inc", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params))) + .get(); assertHitCount(searchResponse, 10); @@ -208,8 +215,11 @@ public class MaxTests extends AbstractNumericTestCase { public void testMultiValuedFieldWithValueScript() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(max("max").field("values").script(new Script("_value + 1"))) - .execute().actionGet(); + .addAggregation( + max("max") + .field("values") + .script(new Script("_value + 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) + .get(); assertHitCount(searchResponse, 10); @@ -225,8 +235,11 @@ public class MaxTests extends AbstractNumericTestCase { params.put("inc", 1); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(max("max").field("values").script(new Script("_value + inc", ScriptType.INLINE, null, params))) - .execute().actionGet(); + .addAggregation( + max("max") + .field("values") + .script(new Script("_value + inc", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params))) + .get(); assertHitCount(searchResponse, 10); @@ -240,7 +253,9 @@ public class MaxTests extends AbstractNumericTestCase { public void testScriptSingleValued() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") 
.setQuery(matchAllQuery()) - .addAggregation(max("max").script(new Script("doc['value'].value"))) + .addAggregation( + max("max") + .script(new Script("doc['value'].value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -255,10 +270,13 @@ public class MaxTests extends AbstractNumericTestCase { public void testScriptSingleValuedWithParams() throws Exception { Map params = new HashMap<>(); params.put("inc", 1); + + Script script = new Script("doc['value'].value + inc", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(max("max").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params))) - .execute().actionGet(); + .addAggregation(max("max").script(script)) + .get(); assertHitCount(searchResponse, 10); @@ -272,8 +290,10 @@ public class MaxTests extends AbstractNumericTestCase { public void testScriptMultiValued() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(max("max").script(new Script("doc['values'].values"))) - .execute().actionGet(); + .addAggregation( + max("max") + .script(new Script("doc['values'].values", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, null))) + .get(); assertHitCount(searchResponse, 10); @@ -287,10 +307,13 @@ public class MaxTests extends AbstractNumericTestCase { public void testScriptMultiValuedWithParams() throws Exception { Map params = new HashMap<>(); params.put("inc", 1); + + Script script = new Script("[ doc['value'].value, doc['value'].value + inc ]", ScriptType.INLINE, + AggregationTestScriptsPlugin.NAME, params); + SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation( - max("max").script(new Script("[ doc['value'].value, doc['value'].value + inc ]", ScriptType.INLINE, 
null, params))) - .execute().actionGet(); + .addAggregation(max("max").script(script)) + .get(); assertHitCount(searchResponse, 10); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java similarity index 86% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java index f61aad9b137..56c12fbc77f 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java @@ -16,19 +16,18 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.groovy.GroovyPlugin; +import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order; -import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase; import org.elasticsearch.search.aggregations.metrics.min.Min; import java.util.Collection; @@ -37,6 +36,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import static java.util.Collections.emptyMap; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static 
org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; @@ -51,10 +51,10 @@ import static org.hamcrest.Matchers.notNullValue; /** * */ -public class MinTests extends AbstractNumericTestCase { +public class MinIT extends AbstractNumericTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(AggregationTestScriptsPlugin.class); } @Override @@ -163,8 +163,11 @@ public class MinTests extends AbstractNumericTestCase { public void testSingleValuedFieldWithValueScript() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(min("min").field("value").script(new Script("_value - 1"))) - .execute().actionGet(); + .addAggregation( + min("min") + .field("value") + .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) + .get(); assertHitCount(searchResponse, 10); @@ -178,10 +181,13 @@ public class MinTests extends AbstractNumericTestCase { public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { Map params = new HashMap<>(); params.put("dec", 1); + + Script script = new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(min("min").field("value").script(new Script("_value - dec", ScriptType.INLINE, null, params))) - .execute().actionGet(); + .addAggregation(min("min").field("value").script(script)) + .get(); assertHitCount(searchResponse, 10); @@ -210,7 +216,11 @@ public class MinTests extends AbstractNumericTestCase { public void testMultiValuedFieldWithValueScript() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(min("min").field("values").script(new 
Script("_value - 1"))).execute().actionGet(); + .addAggregation( + min("min") + .field("values") + .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) + .get(); assertHitCount(searchResponse, 10); @@ -224,7 +234,11 @@ public class MinTests extends AbstractNumericTestCase { // test what happens when values arrive in reverse order since the min // aggregator is optimized to work on sorted values SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(min("min").field("values").script(new Script("_value * -1"))).execute().actionGet(); + .addAggregation( + min("min") + .field("values") + .script(new Script("_value * -1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) + .get(); assertHitCount(searchResponse, 10); @@ -238,9 +252,12 @@ public class MinTests extends AbstractNumericTestCase { public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { Map params = new HashMap<>(); params.put("dec", 1); + + Script script = new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(min("min").field("values").script(new Script("_value - dec", ScriptType.INLINE, null, params))).execute() - .actionGet(); + .addAggregation(min("min").field("values").script(script)) + .get(); assertHitCount(searchResponse, 10); @@ -252,8 +269,11 @@ public class MinTests extends AbstractNumericTestCase { @Override public void testScriptSingleValued() throws Exception { + Script script = new Script("doc['value'].value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()); + SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(min("min").script(new Script("doc['value'].value"))).execute().actionGet(); + .addAggregation(min("min").script(script)) + .get(); 
assertHitCount(searchResponse, 10); @@ -267,9 +287,12 @@ public class MinTests extends AbstractNumericTestCase { public void testScriptSingleValuedWithParams() throws Exception { Map params = new HashMap<>(); params.put("dec", 1); + + Script script = new Script("doc['value'].value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(min("min").script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params))).execute() - .actionGet(); + .addAggregation(min("min").script(script)) + .get(); assertHitCount(searchResponse, 10); @@ -281,8 +304,10 @@ public class MinTests extends AbstractNumericTestCase { @Override public void testScriptMultiValued() throws Exception { + Script script = new Script("doc['values'].values", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()); SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(min("min").script(new Script("doc['values'].values"))).execute().actionGet(); + .addAggregation(min("min").script(script)) + .get(); assertHitCount(searchResponse, 10); @@ -296,14 +321,12 @@ public class MinTests extends AbstractNumericTestCase { public void testScriptMultiValuedWithParams() throws Exception { Map params = new HashMap<>(); params.put("dec", 1); + SearchResponse searchResponse = client() .prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation( - min("min") - .script(new Script( - "List values = doc['values'].values; double[] res = new double[values.size()]; for (int i = 0; i < res.length; i++) { res[i] = values.get(i) - dec; }; return res;", - ScriptType.INLINE, null, params))).execute().actionGet(); + .addAggregation(min("min").script(AggregationTestScriptsPlugin.DECREMENT_ALL_VALUES)) + .get(); assertHitCount(searchResponse, 10); diff --git 
a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StatsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java similarity index 89% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StatsTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java index a8eec2ede85..4e46a0b6a66 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StatsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java @@ -16,20 +16,19 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.groovy.GroovyPlugin; +import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order; -import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase; import org.elasticsearch.search.aggregations.metrics.stats.Stats; import java.util.Collection; @@ -38,6 +37,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import static java.util.Collections.emptyMap; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static 
org.elasticsearch.search.aggregations.AggregationBuilders.filter; @@ -51,13 +51,10 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; -/** - * - */ -public class StatsTests extends AbstractNumericTestCase { +public class StatsIT extends AbstractNumericTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(AggregationTestScriptsPlugin.class); } @Override @@ -224,8 +221,11 @@ public class StatsTests extends AbstractNumericTestCase { public void testSingleValuedFieldWithValueScript() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(stats("stats").field("value").script(new Script("_value + 1"))) - .execute().actionGet(); + .addAggregation( + stats("stats") + .field("value") + .script(new Script("_value + 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) + .get(); assertShardExecutionState(searchResponse, 0); @@ -247,8 +247,11 @@ public class StatsTests extends AbstractNumericTestCase { params.put("inc", 1); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(stats("stats").field("value").script(new Script("_value + inc", ScriptType.INLINE, null, params))) - .execute().actionGet(); + .addAggregation( + stats("stats") + .field("value") + .script(new Script("_value + inc", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params))) + .get(); assertShardExecutionState(searchResponse, 0); @@ -289,8 +292,11 @@ public class StatsTests extends AbstractNumericTestCase { public void testMultiValuedFieldWithValueScript() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(stats("stats").field("values").script(new Script("_value - 1"))) - .execute().actionGet(); + 
.addAggregation( + stats("stats") + .field("values") + .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) + .get(); assertShardExecutionState(searchResponse, 0); @@ -312,8 +318,11 @@ public class StatsTests extends AbstractNumericTestCase { params.put("dec", 1); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(stats("stats").field("values").script(new Script("_value - dec", ScriptType.INLINE, null, params))) - .execute().actionGet(); + .addAggregation( + stats("stats") + .field("values") + .script(new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params))) + .get(); assertShardExecutionState(searchResponse, 0); @@ -333,8 +342,10 @@ public class StatsTests extends AbstractNumericTestCase { public void testScriptSingleValued() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(stats("stats").script(new Script("doc['value'].value"))) - .execute().actionGet(); + .addAggregation( + stats("stats") + .script(new Script("doc['value'].value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()))) + .get(); assertShardExecutionState(searchResponse, 0); @@ -354,10 +365,13 @@ public class StatsTests extends AbstractNumericTestCase { public void testScriptSingleValuedWithParams() throws Exception { Map params = new HashMap<>(); params.put("inc", 1); + + Script script = new Script("doc['value'].value + inc", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(stats("stats").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params))) - .execute().actionGet(); + .addAggregation(stats("stats").script(script)) + .get(); assertShardExecutionState(searchResponse, 0); @@ -375,10 +389,12 @@ public class StatsTests extends 
AbstractNumericTestCase { @Override public void testScriptMultiValued() throws Exception { + Script script = new Script("doc['values'].values", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()); + SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(stats("stats").script(new Script("doc['values'].values"))) - .execute().actionGet(); + .addAggregation(stats("stats").script(script)) + .get(); assertShardExecutionState(searchResponse, 0); @@ -398,12 +414,16 @@ public class StatsTests extends AbstractNumericTestCase { public void testScriptMultiValuedWithParams() throws Exception { Map params = new HashMap<>(); params.put("dec", 1); + + Script script = new Script("[ doc['value'].value, doc['value'].value - dec ]", ScriptType.INLINE, + AggregationTestScriptsPlugin.NAME, params); + SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - stats("stats").script( - new Script("[ doc['value'].value, doc['value'].value - dec ]", ScriptType.INLINE, null, params))) - .execute().actionGet(); + stats("stats") + .script(script)) + .get(); assertShardExecutionState(searchResponse, 0); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java similarity index 76% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java rename to core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java index 0f9279da8f2..3854d200116 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java @@ -17,32 +17,38 @@ * under the License. 
*/ -package org.elasticsearch.messy.tests; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.concurrent.ExecutionException; +package org.elasticsearch.search.functionscore; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; +import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder.FilterFunctionBuilder; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.MockScriptPlugin; +import org.elasticsearch.script.ScoreAccessor; import org.elasticsearch.script.Script; -import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.test.ESIntegTestCase; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.function.Function; + import static org.elasticsearch.client.Requests.searchRequest; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; +import static org.elasticsearch.script.ScriptService.ScriptType; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static 
org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -50,27 +56,54 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSear import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -public class FunctionScoreTests extends ESIntegTestCase { +public class FunctionScoreIT extends ESIntegTestCase { + static final String TYPE = "type"; static final String INDEX = "index"; @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(CustomScriptPlugin.class); + } + + public static class CustomScriptPlugin extends MockScriptPlugin { + + @Override + @SuppressWarnings("unchecked") + protected Map, Object>> pluginScripts() { + Map, Object>> scripts = new HashMap<>(); + scripts.put("1", vars -> 1.0d); + scripts.put("get score value", vars -> ((ScoreAccessor) vars.get("_score")).doubleValue()); + scripts.put("return (doc['num'].value)", vars -> { + Map doc = (Map) vars.get("doc"); + ScriptDocValues.Longs num = (ScriptDocValues.Longs) doc.get("num"); + return num.getValue(); + }); + scripts.put("doc['random_score']", vars -> { + Map doc = (Map) vars.get("doc"); + ScriptDocValues.Doubles randomScore = (ScriptDocValues.Doubles) doc.get("random_score"); + return randomScore.getValue(); + }); + return scripts; + } } public void testScriptScoresNested() throws IOException { createIndex(INDEX); index(INDEX, TYPE, "1", jsonBuilder().startObject().field("dummy_field", 1).endObject()); refresh(); + + Script scriptOne = new Script("1", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script scriptTwo = new Script("get score value", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client().search( searchRequest().source( searchSource().query( functionScoreQuery( functionScoreQuery( - functionScoreQuery(scriptFunction(new 
Script("1"))), - scriptFunction(new Script("_score.doubleValue()"))), - scriptFunction(new Script("_score.doubleValue()")) + functionScoreQuery(scriptFunction(scriptOne)), + scriptFunction(scriptTwo)), + scriptFunction(scriptTwo) ) ) ) @@ -83,10 +116,14 @@ public class FunctionScoreTests extends ESIntegTestCase { createIndex(INDEX); index(INDEX, TYPE, "1", jsonBuilder().startObject().field("dummy_field", 1).endObject()); refresh(); + + Script script = new Script("get score value", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client().search( searchRequest().source( - searchSource().query(functionScoreQuery(scriptFunction(new Script("_score.doubleValue()")))).aggregation( - terms("score_agg").script(new Script("_score.doubleValue()"))) + searchSource() + .query(functionScoreQuery(scriptFunction(script))) + .aggregation(terms("score_agg").script(script)) ) ).actionGet(); assertSearchResponse(response); @@ -100,10 +137,17 @@ public class FunctionScoreTests extends ESIntegTestCase { refresh(); float score = randomFloat(); float minScore = randomFloat(); + + index(INDEX, TYPE, jsonBuilder().startObject() + .field("num", 2) + .field("random_score", score) // Pass the random score as a document field so that it can be extracted in the script + .endObject()); + refresh(); + ensureYellow(); + + Script script = new Script("doc['random_score']", ScriptType.INLINE, CustomScriptPlugin.NAME, null); SearchResponse searchResponse = client().search( - searchRequest().source( - searchSource().query( - functionScoreQuery(scriptFunction(new Script(Float.toString(score)))).setMinScore(minScore))) + searchRequest().source(searchSource().query(functionScoreQuery(scriptFunction(script)).setMinScore(minScore))) ).actionGet(); if (score < minScore) { assertThat(searchResponse.getHits().getTotalHits(), is(0L)); @@ -113,8 +157,8 @@ public class FunctionScoreTests extends ESIntegTestCase { searchResponse = client().search( 
searchRequest().source(searchSource().query(functionScoreQuery(new MatchAllQueryBuilder(), new FilterFunctionBuilder[] { - new FilterFunctionBuilder(scriptFunction(new Script(Float.toString(score)))), - new FilterFunctionBuilder(scriptFunction(new Script(Float.toString(score)))) + new FilterFunctionBuilder(scriptFunction(script)), + new FilterFunctionBuilder(scriptFunction(script)) }).scoreMode(FiltersFunctionScoreQuery.ScoreMode.AVG).setMinScore(minScore))) ).actionGet(); if (score < minScore) { @@ -133,7 +177,7 @@ public class FunctionScoreTests extends ESIntegTestCase { docs.add(client().prepareIndex(INDEX, TYPE, Integer.toString(i)).setSource("num", i + scoreOffset)); } indexRandom(true, docs); - Script script = new Script("return (doc['num'].value)"); + Script script = new Script("return (doc['num'].value)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); int numMatchingDocs = numDocs + scoreOffset - minScore; if (numMatchingDocs < 0) { numMatchingDocs = 0; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java index 867e6d29c88..383e7dd6028 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java @@ -42,18 +42,12 @@ ^^^^^ note: the methods from this test using mustache were moved to the mustache module under its messy tests package. 
renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateHistogramTests.java renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java renamed: core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java renamed: core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/IPv4RangeIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IPv4RangeTests.java renamed: core/src/test/java/org/elasticsearch/script/IndexLookupIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java renamed: core/src/test/java/org/elasticsearch/script/IndexedScriptIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexedScriptTests.java renamed: core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/InnerHitsTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/LongTermsTests.java - renamed: 
core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MaxTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinTests.java renamed: core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/PercolatorTests.java renamed: core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RangeTests.java @@ -65,8 +59,6 @@ renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SignificantTermsSignificanceScoreTests.java renamed: core/src/test/java/org/elasticsearch/nested/SimpleNestedIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleNestedTests.java renamed: core/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StatsTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StringTermsTests.java renamed: 
core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SumTests.java renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/TopHitsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TopHitsTests.java renamed: core/src/test/java/org/elasticsearch/index/mapper/TransformOnIndexMapperIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TransformOnIndexMapperTests.java diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index 519e52074da..b6fddaa427c 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -20,6 +20,7 @@ package org.elasticsearch.script; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Scorer; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.search.lookup.LeafSearchLookup; @@ -183,6 +184,12 @@ public class MockScriptEngine implements ScriptEngineService { public void setNextVar(String name, Object value) { ctx.put(name, value); } + + @Override + public void setScorer(Scorer scorer) { + super.setScorer(scorer); + ctx.put("_score", new ScoreAccessor(scorer)); + } }; leafSearchScript.setLookup(leafLookup); return leafSearchScript; @@ -190,7 +197,7 @@ public class MockScriptEngine implements ScriptEngineService { @Override public boolean needsScores() { - return false; + return true; } } } From a784055db1294b4f65a8ca049dcb80948b9c7646 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 20 Jul 2016 21:43:39 +0200 Subject: [PATCH 53/93] Cleaned up the tests in lang-mustache. 
Messy tests with mustache were either moved to core, moved to a rest test or remained untouched if they actually tested mustache. Also removed tests that were redundant. --- .../search/suggest/SuggestSearchIT.java | 239 ++++++++++++++ modules/lang-mustache/build.gradle | 7 +- .../search/template/SearchTemplateIT.java} | 301 +++++------------- .../tests/RenderSearchTemplateTests.java | 171 ---------- .../messy/tests/SuggestSearchTests.java | 254 --------------- .../messy/tests/TemplateQueryParserTests.java | 225 ------------- .../messy/tests/package-info.java | 46 --- .../mustache/MustacheScriptEngineTests.java | 30 ++ .../scripts/full-query-template.mustache | 6 - .../config/scripts/storedTemplate.mustache | 3 - ...te.yaml => 20_render_search_template.yaml} | 56 +++- .../test/lang_mustache/20_search.yaml | 34 -- .../lang_mustache/30_search_template.yaml | 108 +++++++ .../40_search_request_template.yaml | 38 --- ..._execution.yaml => 40_template_query.yaml} | 32 +- .../lang_mustache/50_messy_test_msearch.yaml | 33 -- ...sic.yaml => 50_multi_search_template.yaml} | 71 +++++ .../msearch_template/20_stored_template.yaml | 66 ---- .../msearch_template/30_file_template.yaml | 72 ----- .../templates/file_query_template.mustache | 5 + .../templates/file_search_template.mustache | 12 + .../scripts => templates}/template_1.mustache | 2 +- 22 files changed, 635 insertions(+), 1176 deletions(-) rename modules/lang-mustache/src/test/java/org/elasticsearch/{messy/tests/TemplateQueryTests.java => action/search/template/SearchTemplateIT.java} (53%) delete mode 100644 modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java delete mode 100644 modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java delete mode 100644 modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java delete mode 100644 
modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/package-info.java delete mode 100644 modules/lang-mustache/src/test/resources/org/elasticsearch/messy/tests/config/scripts/full-query-template.mustache delete mode 100644 modules/lang-mustache/src/test/resources/org/elasticsearch/messy/tests/config/scripts/storedTemplate.mustache rename modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/{30_render_search_template.yaml => 20_render_search_template.yaml} (72%) delete mode 100644 modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/20_search.yaml create mode 100644 modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/30_search_template.yaml delete mode 100644 modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/40_search_request_template.yaml rename modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/{30_template_query_execution.yaml => 40_template_query.yaml} (62%) delete mode 100644 modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_messy_test_msearch.yaml rename modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/{msearch_template/10_basic.yaml => 50_multi_search_template.yaml} (64%) delete mode 100644 modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/20_stored_template.yaml delete mode 100644 modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/30_file_template.yaml create mode 100644 modules/lang-mustache/src/test/resources/templates/file_query_template.mustache create mode 100644 modules/lang-mustache/src/test/resources/templates/file_search_template.mustache rename modules/lang-mustache/src/test/resources/{org/elasticsearch/messy/tests/config/scripts => templates}/template_1.mustache (99%) diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java 
b/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java index 09129072177..b10272329e5 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java @@ -19,15 +19,24 @@ package org.elasticsearch.search.suggest; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.ReduceSearchPhaseException; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.script.CompiledScript; +import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptEngineService; +import org.elasticsearch.script.SearchScript; +import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.suggest.phrase.DirectCandidateGeneratorBuilder; import org.elasticsearch.search.suggest.phrase.Laplace; import org.elasticsearch.search.suggest.phrase.LinearInterpolation; @@ -42,6 +51,7 @@ import java.io.IOException; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -57,6 +67,7 @@ import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestion; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionPhraseCollateMatchExists; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionSize; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; import static org.hamcrest.Matchers.anyOf; @@ -1108,6 +1119,234 @@ public class SuggestSearchIT extends ESIntegTestCase { // assertThat(total, lessThan(1000L)); // Takes many seconds without fix - just for debugging } + @Override + protected Collection> nodePlugins() { + return Collections.singleton(DummyTemplatePlugin.class); + } + + public static class DummyTemplatePlugin extends Plugin implements ScriptPlugin { + @Override + public ScriptEngineService getScriptEngineService(Settings settings) { + return new DummyTemplateScriptEngine(); + } + } + + public static class DummyTemplateScriptEngine implements ScriptEngineService { + + // The collate query setter is hard coded to use mustache, so lets lie in this test about the script plugin, + // which makes the collate code thinks mustache is evaluating the query. 
+ public static final String NAME = "mustache"; + + @Override + public void close() throws IOException { + } + + @Override + public String getType() { + return NAME; + } + + @Override + public String getExtension() { + return NAME; + } + + @Override + public Object compile(String scriptName, String scriptSource, Map params) { + return scriptSource; + } + + @Override + public ExecutableScript executable(CompiledScript compiledScript, Map params) { + String script = (String) compiledScript.compiled(); + for (Entry entry : params.entrySet()) { + script = script.replace("{{" + entry.getKey() + "}}", String.valueOf(entry.getValue())); + } + String result = script; + return new ExecutableScript() { + @Override + public void setNextVar(String name, Object value) { + throw new UnsupportedOperationException("setNextVar not supported"); + } + + @Override + public Object run() { + return new BytesArray(result); + } + }; + } + + @Override + public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map vars) { + throw new UnsupportedOperationException("search script not supported"); + } + + @Override + public boolean isInlineScriptEnabled() { + return true; + } + } + + public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionException, IOException { + CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder() + .put(indexSettings()) + .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable. 
+ .put("index.analysis.analyzer.text.tokenizer", "standard") + .putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle") + .put("index.analysis.filter.my_shingle.type", "shingle") + .put("index.analysis.filter.my_shingle.output_unigrams", true) + .put("index.analysis.filter.my_shingle.min_shingle_size", 2) + .put("index.analysis.filter.my_shingle.max_shingle_size", 3)); + + XContentBuilder mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") + .startObject("properties") + .startObject("title") + .field("type", "text") + .field("analyzer", "text") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(builder.addMapping("type1", mapping)); + ensureGreen(); + + List titles = new ArrayList<>(); + + titles.add("United States House of Representatives Elections in Washington 2006"); + titles.add("United States House of Representatives Elections in Washington 2005"); + titles.add("State"); + titles.add("Houses of Parliament"); + titles.add("Representative Government"); + titles.add("Election"); + + List builders = new ArrayList<>(); + for (String title: titles) { + builders.add(client().prepareIndex("test", "type1").setSource("title", title)); + } + indexRandom(true, builders); + + // suggest without collate + PhraseSuggestionBuilder suggest = phraseSuggestion("title") + .addCandidateGenerator(new DirectCandidateGeneratorBuilder("title") + .suggestMode("always") + .maxTermFreq(.99f) + .size(10) + .maxInspections(200) + ) + .confidence(0f) + .maxErrors(2f) + .shardSize(30000) + .size(10); + Suggest searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", suggest); + assertSuggestionSize(searchSuggest, 0, 10, "title"); + + // suggest with collate + String filterString = XContentFactory.jsonBuilder() + .startObject() + .startObject("match_phrase") + .field("{{field}}", "{{suggestion}}") + .endObject() + .endObject() + .string(); + PhraseSuggestionBuilder 
filteredQuerySuggest = suggest.collateQuery(filterString); + filteredQuerySuggest.collateParams(Collections.singletonMap("field", "title")); + searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", filteredQuerySuggest); + assertSuggestionSize(searchSuggest, 0, 2, "title"); + + // collate suggest with no result (boundary case) + searchSuggest = searchSuggest("Elections of Representatives Parliament", "title", filteredQuerySuggest); + assertSuggestionSize(searchSuggest, 0, 0, "title"); + + NumShards numShards = getNumShards("test"); + + // collate suggest with bad query + String incorrectFilterString = XContentFactory.jsonBuilder() + .startObject() + .startObject("test") + .field("title", "{{suggestion}}") + .endObject() + .endObject() + .string(); + PhraseSuggestionBuilder incorrectFilteredSuggest = suggest.collateQuery(incorrectFilterString); + Map> namedSuggestion = new HashMap<>(); + namedSuggestion.put("my_title_suggestion", incorrectFilteredSuggest); + try { + searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion); + fail("Post query error has been swallowed"); + } catch(ElasticsearchException e) { + // expected + } + + // suggest with collation + String filterStringAsFilter = XContentFactory.jsonBuilder() + .startObject() + .startObject("match_phrase") + .field("title", "{{suggestion}}") + .endObject() + .endObject() + .string(); + + PhraseSuggestionBuilder filteredFilterSuggest = suggest.collateQuery(filterStringAsFilter); + searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", + filteredFilterSuggest); + assertSuggestionSize(searchSuggest, 0, 2, "title"); + + // collate suggest with bad query + String filterStr = XContentFactory.jsonBuilder() + .startObject() + .startObject("pprefix") + .field("title", "{{suggestion}}") + .endObject() + .endObject() + .string(); + + 
PhraseSuggestionBuilder in = suggest.collateQuery(filterStr); + try { + searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion); + fail("Post filter error has been swallowed"); + } catch(ElasticsearchException e) { + //expected + } + + // collate script failure due to no additional params + String collateWithParams = XContentFactory.jsonBuilder() + .startObject() + .startObject("{{query_type}}") + .field("{{query_field}}", "{{suggestion}}") + .endObject() + .endObject() + .string(); + + + PhraseSuggestionBuilder phraseSuggestWithNoParams = suggest.collateQuery(collateWithParams); + try { + searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion); + fail("Malformed query (lack of additional params) should fail"); + } catch (ElasticsearchException e) { + // expected + } + + // collate script with additional params + Map params = new HashMap<>(); + params.put("query_type", "match_phrase"); + params.put("query_field", "title"); + + PhraseSuggestionBuilder phraseSuggestWithParams = suggest.collateQuery(collateWithParams).collateParams(params); + searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", + phraseSuggestWithParams); + assertSuggestionSize(searchSuggest, 0, 2, "title"); + + // collate query request with prune set to true + PhraseSuggestionBuilder phraseSuggestWithParamsAndReturn = suggest.collateQuery(collateWithParams).collateParams(params) + .collatePrune(true); + searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", + phraseSuggestWithParamsAndReturn); + assertSuggestionSize(searchSuggest, 0, 10, "title"); + assertSuggestionPhraseCollateMatchExists(searchSuggest, "title", 2); + } + protected Suggest searchSuggest(String name, SuggestionBuilder suggestion) { return searchSuggest(null, name, suggestion); } diff 
--git a/modules/lang-mustache/build.gradle b/modules/lang-mustache/build.gradle index 271faa23608..2d34eacc572 100644 --- a/modules/lang-mustache/build.gradle +++ b/modules/lang-mustache/build.gradle @@ -20,7 +20,7 @@ esplugin { description 'Mustache scripting integration for Elasticsearch' classname 'org.elasticsearch.script.mustache.MustachePlugin' - hasClientJar = true // For the template query + hasClientJar = true // For the template apis and query } dependencies { @@ -31,9 +31,6 @@ integTest { cluster { setting 'script.inline', 'true' setting 'script.stored', 'true' - - - File template = new File('src/test/resources/org/elasticsearch/messy/tests/config/scripts/template_1.mustache') - extraConfigFile 'scripts/template_1.mustache', template + setting 'path.scripts', "${project.buildDir}/resources/test/templates" } } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/SearchTemplateIT.java similarity index 53% rename from modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java rename to modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/SearchTemplateIT.java index 90f73da8fb8..ca5ed3e82f4 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/SearchTemplateIT.java @@ -16,137 +16,56 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.messy.tests; +package org.elasticsearch.action.search.template; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; -import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.template.SearchTemplateAction; -import org.elasticsearch.action.search.template.SearchTemplateRequest; -import org.elasticsearch.action.search.template.SearchTemplateRequestBuilder; -import org.elasticsearch.action.search.template.SearchTemplateResponse; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.script.mustache.TemplateQueryBuilder; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.action.search.template.RestSearchTemplateAction; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.mustache.MustachePlugin; import org.elasticsearch.script.mustache.MustacheScriptEngineService; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.script.mustache.TemplateQueryBuilder; +import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; import java.io.IOException; -import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; 
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; /** * Full integration test of the template query plugin. */ -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) -public class TemplateQueryTests extends ESIntegTestCase { +public class SearchTemplateIT extends ESSingleNodeTestCase { @Override - protected Collection> nodePlugins() { + protected Collection> getPlugins() { return Collections.singleton(MustachePlugin.class); } - @Override - protected Collection> transportClientPlugins() { - return nodePlugins(); - } - @Before public void setup() throws IOException { createIndex("test"); - ensureGreen("test"); - - index("test", "testtype", "1", jsonBuilder().startObject().field("text", "value1").endObject()); - index("test", "testtype", "2", jsonBuilder().startObject().field("text", "value2").endObject()); - refresh(); - } - - @Override - public Settings nodeSettings(int nodeOrdinal) { - return Settings.builder().put(super.nodeSettings(nodeOrdinal)) - .put(Environment.PATH_CONF_SETTING.getKey(), this.getDataPath("config")).build(); - } - - public void testTemplateInBody() throws IOException { - Map vars = new HashMap<>(); - vars.put("template", "all"); - - TemplateQueryBuilder builder = new TemplateQueryBuilder("{\"match_{{template}}\": {}}\"", ScriptType.INLINE,vars); - SearchResponse sr = client().prepareSearch().setQuery(builder) - .execute().actionGet(); - assertHitCount(sr, 2); - } - - public void testTemplateInBodyWithSize() throws IOException { - Map params = new HashMap<>(); - params.put("template", "all"); - SearchResponse sr = client().prepareSearch() - .setSource( - new SearchSourceBuilder().size(0).query( - new TemplateQueryBuilder("{ \"match_{{template}}\": {} }", ScriptType.INLINE, params))) + client().prepareIndex("test", "testtype", "1") + 
.setSource(jsonBuilder().startObject().field("text", "value1").endObject()) .get(); - assertNoFailures(sr); - assertThat(sr.getHits().hits().length, equalTo(0)); - } - - public void testTemplateWOReplacementInBody() throws IOException { - Map vars = new HashMap<>(); - - TemplateQueryBuilder builder = new TemplateQueryBuilder("{\"match_all\": {}}\"", ScriptType.INLINE, vars); - SearchResponse sr = client().prepareSearch().setQuery(builder) - .execute().actionGet(); - assertHitCount(sr, 2); - } - - public void testTemplateInFile() { - Map vars = new HashMap<>(); - vars.put("template", "all"); - - TemplateQueryBuilder builder = new TemplateQueryBuilder("storedTemplate", ScriptService.ScriptType.FILE, vars); - SearchResponse sr = client().prepareSearch().setQuery(builder) - .execute().actionGet(); - assertHitCount(sr, 2); - } - - public void testRawFSTemplate() throws IOException { - Map params = new HashMap<>(); - params.put("template", "all"); - TemplateQueryBuilder builder = new TemplateQueryBuilder("storedTemplate", ScriptType.FILE, params); - SearchResponse sr = client().prepareSearch().setQuery(builder).get(); - assertHitCount(sr, 2); - } - - public void testSearchRequestTemplateSource() throws Exception { - SearchRequest searchRequest = new SearchRequest(); - searchRequest.indices("_all"); - - String query = "{ \"inline\" : { \"query\": {\"match_{{template}}\": {} } }, \"params\" : { \"template\":\"all\" } }"; - SearchTemplateRequest request = RestSearchTemplateAction.parse(new BytesArray(query)); - request.setRequest(searchRequest); - SearchTemplateResponse response = client().execute(SearchTemplateAction.INSTANCE, request).get(); - assertHitCount(response.getResponse(), 2); + client().prepareIndex("test", "testtype", "2") + .setSource(jsonBuilder().startObject().field("text", "value2").endObject()) + .get(); + client().admin().indices().prepareRefresh().get(); } // Relates to #6318 @@ -173,50 +92,6 @@ public class TemplateQueryTests extends ESIntegTestCase { 
assertThat(searchResponse.getResponse().getHits().hits().length, equalTo(1)); } - public void testThatParametersCanBeSet() throws Exception { - index("test", "type", "1", jsonBuilder().startObject().field("theField", "foo").endObject()); - index("test", "type", "2", jsonBuilder().startObject().field("theField", "foo 2").endObject()); - index("test", "type", "3", jsonBuilder().startObject().field("theField", "foo 3").endObject()); - index("test", "type", "4", jsonBuilder().startObject().field("theField", "foo 4").endObject()); - index("test", "type", "5", jsonBuilder().startObject().field("otherField", "foo").endObject()); - refresh(); - - Map templateParams = new HashMap<>(); - templateParams.put("mySize", "2"); - templateParams.put("myField", "theField"); - templateParams.put("myValue", "foo"); - - SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()) - .setRequest(new SearchRequest("test").types("type")) - .setScript("full-query-template") - .setScriptType(ScriptType.FILE) - .setScriptParams(templateParams) - .get(); - assertHitCount(searchResponse.getResponse(), 4); - // size kicks in here... 
- assertThat(searchResponse.getResponse().getHits().getHits().length, is(2)); - - templateParams.put("myField", "otherField"); - searchResponse = new SearchTemplateRequestBuilder(client()) - .setRequest(new SearchRequest("test").types("type")) - .setScript("full-query-template") - .setScriptType(ScriptType.FILE) - .setScriptParams(templateParams) - .get(); - assertHitCount(searchResponse.getResponse(), 1); - } - - public void testSearchTemplateQueryFromFile() throws Exception { - SearchRequest searchRequest = new SearchRequest(); - searchRequest.indices("_all"); - String query = "{" + " \"file\": \"full-query-template\"," + " \"params\":{" + " \"mySize\": 2," - + " \"myField\": \"text\"," + " \"myValue\": \"value1\"" + " }" + "}"; - SearchTemplateRequest request = RestSearchTemplateAction.parse(new BytesArray(query)); - request.setRequest(searchRequest); - SearchTemplateResponse searchResponse = client().execute(SearchTemplateAction.INSTANCE, request).get(); - assertThat(searchResponse.getResponse().getHits().hits().length, equalTo(1)); - } - /** * Test that template can be expressed as a single escaped string. 
*/ @@ -225,7 +100,7 @@ public class TemplateQueryTests extends ESIntegTestCase { searchRequest.indices("_all"); String query = "{" + " \"inline\" : \"{ \\\"size\\\": \\\"{{size}}\\\", \\\"query\\\":{\\\"match_all\\\":{}}}\"," + " \"params\":{" + " \"size\": 1" + " }" + "}"; - SearchTemplateRequest request = RestSearchTemplateAction.parse(new BytesArray(query)); + SearchTemplateRequest request = RestSearchTemplateAction.parse(new BytesArray(query)); request.setRequest(searchRequest); SearchTemplateResponse searchResponse = client().execute(SearchTemplateAction.INSTANCE, request).get(); assertThat(searchResponse.getResponse().getHits().hits().length, equalTo(1)); @@ -241,7 +116,7 @@ public class TemplateQueryTests extends ESIntegTestCase { String templateString = "{" + " \"inline\" : \"{ {{#use_size}} \\\"size\\\": \\\"{{size}}\\\", {{/use_size}} \\\"query\\\":{\\\"match_all\\\":{}}}\"," + " \"params\":{" + " \"size\": 1," + " \"use_size\": true" + " }" + "}"; - SearchTemplateRequest request = RestSearchTemplateAction.parse(new BytesArray(templateString)); + SearchTemplateRequest request = RestSearchTemplateAction.parse(new BytesArray(templateString)); request.setRequest(searchRequest); SearchTemplateResponse searchResponse = client().execute(SearchTemplateAction.INSTANCE, request).get(); assertThat(searchResponse.getResponse().getHits().hits().length, equalTo(1)); @@ -257,7 +132,7 @@ public class TemplateQueryTests extends ESIntegTestCase { String templateString = "{" + " \"inline\" : \"{ \\\"query\\\":{\\\"match_all\\\":{}} {{#use_size}}, \\\"size\\\": \\\"{{size}}\\\" {{/use_size}} }\"," + " \"params\":{" + " \"size\": 1," + " \"use_size\": true" + " }" + "}"; - SearchTemplateRequest request = RestSearchTemplateAction.parse(new BytesArray(templateString)); + SearchTemplateRequest request = RestSearchTemplateAction.parse(new BytesArray(templateString)); request.setRequest(searchRequest); SearchTemplateResponse searchResponse = 
client().execute(SearchTemplateAction.INSTANCE, request).get(); assertThat(searchResponse.getResponse().getHits().hits().length, equalTo(1)); @@ -268,39 +143,38 @@ public class TemplateQueryTests extends ESIntegTestCase { .setScriptLang(MustacheScriptEngineService.NAME) .setId("testTemplate") .setSource(new BytesArray("{" + - "\"template\":{" + - " \"query\":{" + - " \"match\":{" + - " \"theField\" : \"{{fieldParam}}\"}" + - " }" + - "}" + - "}"))); + "\"template\":{" + + " \"query\":{" + + " \"match\":{" + + " \"theField\" : \"{{fieldParam}}\"}" + + " }" + + "}" + + "}"))); assertAcked(client().admin().cluster().preparePutStoredScript() .setScriptLang(MustacheScriptEngineService.NAME) .setId("testTemplate").setSource(new BytesArray("{" + - "\"template\":{" + - " \"query\":{" + - " \"match\":{" + - " \"theField\" : \"{{fieldParam}}\"}" + - " }" + - "}" + - "}"))); + "\"template\":{" + + " \"query\":{" + + " \"match\":{" + + " \"theField\" : \"{{fieldParam}}\"}" + + " }" + + "}" + + "}"))); GetStoredScriptResponse getResponse = client().admin().cluster() .prepareGetStoredScript(MustacheScriptEngineService.NAME, "testTemplate").get(); assertNotNull(getResponse.getStoredScript()); - List builders = new ArrayList<>(); - - builders.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}")); - builders.add(client().prepareIndex("test", "type", "2").setSource("{\"theField\":\"foo 2\"}")); - builders.add(client().prepareIndex("test", "type", "3").setSource("{\"theField\":\"foo 3\"}")); - builders.add(client().prepareIndex("test", "type", "4").setSource("{\"theField\":\"foo 4\"}")); - builders.add(client().prepareIndex("test", "type", "5").setSource("{\"theField\":\"bar\"}")); - - indexRandom(true, builders); + BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); + bulkRequestBuilder.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}")); + bulkRequestBuilder.add(client().prepareIndex("test", "type", 
"2").setSource("{\"theField\":\"foo 2\"}")); + bulkRequestBuilder.add(client().prepareIndex("test", "type", "3").setSource("{\"theField\":\"foo 3\"}")); + bulkRequestBuilder.add(client().prepareIndex("test", "type", "4").setSource("{\"theField\":\"foo 4\"}")); + bulkRequestBuilder.add(client().prepareIndex("test", "type", "5").setSource("{\"theField\":\"bar\"}")); + bulkRequestBuilder.get(); + client().admin().indices().prepareRefresh().get(); Map templateParams = new HashMap<>(); templateParams.put("fieldParam", "foo"); @@ -330,20 +204,20 @@ public class TemplateQueryTests extends ESIntegTestCase { .setScriptLang(MustacheScriptEngineService.NAME) .setId("1a") .setSource(new BytesArray("{" + - "\"template\":{"+ - " \"query\":{" + - " \"match\":{" + - " \"theField\" : \"{{fieldParam}}\"}" + - " }" + - "}" + - "}" + "\"template\":{" + + " \"query\":{" + + " \"match\":{" + + " \"theField\" : \"{{fieldParam}}\"}" + + " }" + + "}" + + "}" )) ); assertAcked(client().admin().cluster().preparePutStoredScript() .setScriptLang(MustacheScriptEngineService.NAME) .setId("2") .setSource(new BytesArray("{" + - "\"template\":{"+ + "\"template\":{" + " \"query\":{" + " \"match\":{" + " \"theField\" : \"{{fieldParam}}\"}" + @@ -355,20 +229,21 @@ public class TemplateQueryTests extends ESIntegTestCase { .setScriptLang(MustacheScriptEngineService.NAME) .setId("3") .setSource(new BytesArray("{" + - "\"template\":{"+ + "\"template\":{" + " \"match\":{" + " \"theField\" : \"{{fieldParam}}\"}" + " }" + "}")) ); - List builders = new ArrayList<>(); - builders.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}")); - builders.add(client().prepareIndex("test", "type", "2").setSource("{\"theField\":\"foo 2\"}")); - builders.add(client().prepareIndex("test", "type", "3").setSource("{\"theField\":\"foo 3\"}")); - builders.add(client().prepareIndex("test", "type", "4").setSource("{\"theField\":\"foo 4\"}")); - builders.add(client().prepareIndex("test", "type", 
"5").setSource("{\"theField\":\"bar\"}")); - indexRandom(true, builders); + BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); + bulkRequestBuilder.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}")); + bulkRequestBuilder.add(client().prepareIndex("test", "type", "2").setSource("{\"theField\":\"foo 2\"}")); + bulkRequestBuilder.add(client().prepareIndex("test", "type", "3").setSource("{\"theField\":\"foo 3\"}")); + bulkRequestBuilder.add(client().prepareIndex("test", "type", "4").setSource("{\"theField\":\"foo 4\"}")); + bulkRequestBuilder.add(client().prepareIndex("test", "type", "5").setSource("{\"theField\":\"bar\"}")); + bulkRequestBuilder.get(); + client().admin().indices().prepareRefresh().get(); Map templateParams = new HashMap<>(); templateParams.put("fieldParam", "foo"); @@ -388,12 +263,12 @@ public class TemplateQueryTests extends ESIntegTestCase { .setScriptParams(templateParams) .get()); - expectThrows(IllegalArgumentException.class, () -> new SearchTemplateRequestBuilder(client()) - .setRequest(new SearchRequest().indices("test").types("type")) - .setScript("/myindex/mustache/1") - .setScriptType(ScriptType.STORED) - .setScriptParams(templateParams) - .get()); + expectThrows(IllegalArgumentException.class, () -> new SearchTemplateRequestBuilder(client()) + .setRequest(new SearchRequest().indices("test").types("type")) + .setScript("/myindex/mustache/1") + .setScriptType(ScriptType.STORED) + .setScriptParams(templateParams) + .get()); templateParams.put("fieldParam", "bar"); searchResponse = new SearchTemplateRequestBuilder(client()) @@ -409,17 +284,6 @@ public class TemplateQueryTests extends ESIntegTestCase { SearchResponse sr = client().prepareSearch().setQuery(builder) .execute().actionGet(); assertHitCount(sr, 1); - - // "{\"template\": {\"id\": \"3\",\"params\" : {\"fieldParam\" : \"foo\"}}}"; - Map params = new HashMap<>(); - params.put("fieldParam", "foo"); - TemplateQueryBuilder templateQuery = new 
TemplateQueryBuilder("3", ScriptType.STORED, params); - sr = client().prepareSearch().setQuery(templateQuery).get(); - assertHitCount(sr, 4); - - templateQuery = new TemplateQueryBuilder("/mustache/3", ScriptType.STORED, params); - sr = client().prepareSearch().setQuery(templateQuery).get(); - assertHitCount(sr, 4); } // Relates to #10397 @@ -427,13 +291,15 @@ public class TemplateQueryTests extends ESIntegTestCase { createIndex("testindex"); ensureGreen("testindex"); - index("testindex", "test", "1", jsonBuilder().startObject().field("searchtext", "dev1").endObject()); - refresh(); + client().prepareIndex("testindex", "test", "1") + .setSource(jsonBuilder().startObject().field("searchtext", "dev1").endObject()) + .get(); + client().admin().indices().prepareRefresh().get(); int iterations = randomIntBetween(2, 11); for (int i = 1; i < iterations; i++) { assertAcked(client().admin().cluster().preparePutStoredScript() - .setScriptLang(MustacheScriptEngineService.NAME) + .setScriptLang(MustacheScriptEngineService.NAME) .setId("git01") .setSource(new BytesArray("{\"template\":{\"query\": {\"match\": {\"searchtext\": {\"query\": \"{{P_Keyword1}}\"," + "\"type\": \"ooophrase_prefix\"}}}}}"))); @@ -466,26 +332,25 @@ public class TemplateQueryTests extends ESIntegTestCase { } public void testIndexedTemplateWithArray() throws Exception { - String multiQuery = "{\"query\":{\"terms\":{\"theField\":[\"{{#fieldParam}}\",\"{{.}}\",\"{{/fieldParam}}\"]}}}"; + String multiQuery = "{\"query\":{\"terms\":{\"theField\":[\"{{#fieldParam}}\",\"{{.}}\",\"{{/fieldParam}}\"]}}}"; + assertAcked( + client().admin().cluster().preparePutStoredScript() + .setScriptLang(MustacheScriptEngineService.NAME) + .setId("4") + .setSource(jsonBuilder().startObject().field("template", multiQuery).endObject().bytes()) + ); + BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); + bulkRequestBuilder.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}")); + 
bulkRequestBuilder.add(client().prepareIndex("test", "type", "2").setSource("{\"theField\":\"foo 2\"}")); + bulkRequestBuilder.add(client().prepareIndex("test", "type", "3").setSource("{\"theField\":\"foo 3\"}")); + bulkRequestBuilder.add(client().prepareIndex("test", "type", "4").setSource("{\"theField\":\"foo 4\"}")); + bulkRequestBuilder.add(client().prepareIndex("test", "type", "5").setSource("{\"theField\":\"bar\"}")); + bulkRequestBuilder.get(); + client().admin().indices().prepareRefresh().get(); - assertAcked( - client().admin().cluster().preparePutStoredScript() - .setScriptLang(MustacheScriptEngineService.NAME) - .setId("4") - .setSource(jsonBuilder().startObject().field("template", multiQuery).endObject().bytes()) - ); - List builders = new ArrayList<>(); - builders.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}")); - builders.add(client().prepareIndex("test", "type", "2").setSource("{\"theField\":\"foo 2\"}")); - builders.add(client().prepareIndex("test", "type", "3").setSource("{\"theField\":\"foo 3\"}")); - builders.add(client().prepareIndex("test", "type", "4").setSource("{\"theField\":\"foo 4\"}")); - builders.add(client().prepareIndex("test", "type", "5").setSource("{\"theField\":\"bar\"}")); - - indexRandom(true,builders); - - Map arrayTemplateParams = new HashMap<>(); - String[] fieldParams = {"foo","bar"}; - arrayTemplateParams.put("fieldParam", fieldParams); + Map arrayTemplateParams = new HashMap<>(); + String[] fieldParams = {"foo", "bar"}; + arrayTemplateParams.put("fieldParam", fieldParams); SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()) .setRequest(new SearchRequest("test").types("type")) diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java deleted file mode 100644 index 1e352e1ab4c..00000000000 --- 
a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java +++ /dev/null @@ -1,171 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.messy.tests; - -import org.elasticsearch.action.search.template.SearchTemplateRequestBuilder; -import org.elasticsearch.action.search.template.SearchTemplateResponse; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.env.Environment; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.mustache.MustachePlugin; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; - -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.ExecutionException; - -import static org.hamcrest.Matchers.equalTo; 
-import static org.hamcrest.Matchers.notNullValue; - -@ESIntegTestCase.SuiteScopeTestCase -public class RenderSearchTemplateTests extends ESIntegTestCase { - private static final String TEMPLATE_CONTENTS = "{\"size\":\"{{size}}\",\"query\":{\"match\":{\"foo\":\"{{value}}\"}},\"aggs\":{\"objects\":{\"terms\":{\"field\":\"{{value}}\",\"size\":\"{{size}}\"}}}}"; - - @Override - protected Collection> nodePlugins() { - return Collections.singleton(MustachePlugin.class); - } - - @Override - protected Collection> transportClientPlugins() { - return nodePlugins(); - } - - @Override - protected void setupSuiteScopeCluster() throws Exception { - ElasticsearchAssertions.assertAcked(client().admin().cluster().preparePutStoredScript() - .setScriptLang(MustacheScriptEngineService.NAME) - .setId("index_template_1") - .setSource(new BytesArray("{ \"template\": " + TEMPLATE_CONTENTS + " }"))); - } - - @Override - public Settings nodeSettings(int nodeOrdinal) { - Path configDir = createTempDir(); - Path scriptsDir = configDir.resolve("scripts"); - try { - Files.createDirectories(scriptsDir); - Files.write(scriptsDir.resolve("file_template_1.mustache"), TEMPLATE_CONTENTS.getBytes("UTF-8")); - } catch (Exception e) { - throw new RuntimeException(e); - } - return Settings.builder().put(super.nodeSettings(nodeOrdinal)) - .put(Environment.PATH_CONF_SETTING.getKey(), configDir).build(); - } - - public void testInlineTemplate() throws ExecutionException, InterruptedException { - Map params = new HashMap<>(); - params.put("value", "bar"); - params.put("size", 20); - SearchTemplateResponse response = prepareRenderSearchTemplate(TEMPLATE_CONTENTS, ScriptType.INLINE, params).get(); - assertThat(response, notNullValue()); - assertFalse(response.hasResponse()); - BytesReference source = response.getSource(); - assertThat(source, notNullValue()); - Map sourceAsMap = XContentHelper.convertToMap(source, false).v2(); - assertThat(sourceAsMap, notNullValue()); - String expected = 
TEMPLATE_CONTENTS.replace("{{value}}", "bar").replace("{{size}}", "20"); - Map expectedMap = XContentHelper.convertToMap(new BytesArray(expected), false).v2(); - assertThat(sourceAsMap, equalTo(expectedMap)); - - params = new HashMap<>(); - params.put("value", "baz"); - params.put("size", 100); - response = prepareRenderSearchTemplate(TEMPLATE_CONTENTS, ScriptType.INLINE, params).get(); - assertThat(response, notNullValue()); - assertFalse(response.hasResponse()); - source = response.getSource(); - assertThat(source, notNullValue()); - sourceAsMap = XContentHelper.convertToMap(source, false).v2(); - expected = TEMPLATE_CONTENTS.replace("{{value}}", "baz").replace("{{size}}", "100"); - expectedMap = XContentHelper.convertToMap(new BytesArray(expected), false).v2(); - assertThat(sourceAsMap, equalTo(expectedMap)); - } - - public void testIndexedTemplate() throws ExecutionException, InterruptedException { - Map params = new HashMap<>(); - params.put("value", "bar"); - params.put("size", 20); - SearchTemplateResponse response = prepareRenderSearchTemplate("index_template_1", ScriptType.STORED, params).get(); - assertThat(response, notNullValue()); - assertFalse(response.hasResponse()); - BytesReference source = response.getSource(); - assertThat(source, notNullValue()); - Map sourceAsMap = XContentHelper.convertToMap(source, false).v2(); - assertThat(sourceAsMap, notNullValue()); - String expected = TEMPLATE_CONTENTS.replace("{{value}}", "bar").replace("{{size}}", "20"); - Map expectedMap = XContentHelper.convertToMap(new BytesArray(expected), false).v2(); - assertThat(sourceAsMap, equalTo(expectedMap)); - - params = new HashMap<>(); - params.put("value", "baz"); - params.put("size", 100); - response = prepareRenderSearchTemplate("index_template_1", ScriptType.STORED, params).get(); - assertThat(response, notNullValue()); - source = response.getSource(); - assertThat(source, notNullValue()); - sourceAsMap = XContentHelper.convertToMap(source, false).v2(); - expected = 
TEMPLATE_CONTENTS.replace("{{value}}", "baz").replace("{{size}}", "100"); - expectedMap = XContentHelper.convertToMap(new BytesArray(expected), false).v2(); - assertThat(sourceAsMap, equalTo(expectedMap)); - } - - public void testFileTemplate() throws ExecutionException, InterruptedException { - Map params = new HashMap<>(); - params.put("value", "bar"); - params.put("size", 20); - SearchTemplateResponse response = prepareRenderSearchTemplate("file_template_1", ScriptType.FILE, params).get(); - assertThat(response, notNullValue()); - assertFalse(response.hasResponse()); - BytesReference source = response.getSource(); - assertThat(source, notNullValue()); - Map sourceAsMap = XContentHelper.convertToMap(source, false).v2(); - assertThat(sourceAsMap, notNullValue()); - String expected = TEMPLATE_CONTENTS.replace("{{value}}", "bar").replace("{{size}}", "20"); - Map expectedMap = XContentHelper.convertToMap(new BytesArray(expected), false).v2(); - assertThat(sourceAsMap, equalTo(expectedMap)); - - params = new HashMap<>(); - params.put("value", "baz"); - params.put("size", 100); - response = prepareRenderSearchTemplate("file_template_1", ScriptType.FILE, params).get(); - assertThat(response, notNullValue()); - source = response.getSource(); - assertThat(source, notNullValue()); - sourceAsMap = XContentHelper.convertToMap(source, false).v2(); - expected = TEMPLATE_CONTENTS.replace("{{value}}", "baz").replace("{{size}}", "100"); - expectedMap = XContentHelper.convertToMap(new BytesArray(expected), false).v2(); - assertThat(sourceAsMap, equalTo(expectedMap)); - } - - private SearchTemplateRequestBuilder prepareRenderSearchTemplate(String script, ScriptType type, Map params) { - return new SearchTemplateRequestBuilder(client()).setScript(script).setScriptType(type).setScriptParams(params).setSimulate(true); - } -} diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java 
b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java deleted file mode 100644 index bac598ba282..00000000000 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java +++ /dev/null @@ -1,254 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.messy.tests; - - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; -import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.mustache.MustachePlugin; -import org.elasticsearch.search.suggest.Suggest; -import org.elasticsearch.search.suggest.SuggestBuilder; -import org.elasticsearch.search.suggest.SuggestionBuilder; -import org.elasticsearch.search.suggest.phrase.DirectCandidateGeneratorBuilder; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; -import org.elasticsearch.test.ESIntegTestCase; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.concurrent.ExecutionException; - -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; -import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionPhraseCollateMatchExists; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionSize; -import static org.hamcrest.Matchers.equalTo; - -/** - * Integration tests for term and phrase suggestions. Many of these tests many requests that vary only slightly from one another. 
Where - * possible these tests should declare for the first request, make the request, modify the configuration for the next request, make that - * request, modify again, request again, etc. This makes it very obvious what changes between requests. - */ -public class SuggestSearchTests extends ESIntegTestCase { - - @Override - protected Collection> nodePlugins() { - return Collections.singleton(MustachePlugin.class); - } - - public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionException, IOException { - CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder() - .put(indexSettings()) - .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable. - .put("index.analysis.analyzer.text.tokenizer", "standard") - .putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle") - .put("index.analysis.filter.my_shingle.type", "shingle") - .put("index.analysis.filter.my_shingle.output_unigrams", true) - .put("index.analysis.filter.my_shingle.min_shingle_size", 2) - .put("index.analysis.filter.my_shingle.max_shingle_size", 3)); - - XContentBuilder mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("properties") - .startObject("title") - .field("type", "text") - .field("analyzer", "text") - .endObject() - .endObject() - .endObject() - .endObject(); - assertAcked(builder.addMapping("type1", mapping)); - ensureGreen(); - - List titles = new ArrayList<>(); - - titles.add("United States House of Representatives Elections in Washington 2006"); - titles.add("United States House of Representatives Elections in Washington 2005"); - titles.add("State"); - titles.add("Houses of Parliament"); - titles.add("Representative Government"); - titles.add("Election"); - - List builders = new ArrayList<>(); - for (String title: titles) { - builders.add(client().prepareIndex("test", "type1").setSource("title", title)); - } - indexRandom(true, builders); - 
- // suggest without collate - PhraseSuggestionBuilder suggest = phraseSuggestion("title") - .addCandidateGenerator(new DirectCandidateGeneratorBuilder("title") - .suggestMode("always") - .maxTermFreq(.99f) - .size(10) - .maxInspections(200) - ) - .confidence(0f) - .maxErrors(2f) - .shardSize(30000) - .size(10); - Suggest searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", suggest); - assertSuggestionSize(searchSuggest, 0, 10, "title"); - - // suggest with collate - String filterString = XContentFactory.jsonBuilder() - .startObject() - .startObject("match_phrase") - .field("{{field}}", "{{suggestion}}") - .endObject() - .endObject() - .string(); - PhraseSuggestionBuilder filteredQuerySuggest = suggest.collateQuery(filterString); - filteredQuerySuggest.collateParams(Collections.singletonMap("field", "title")); - searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", filteredQuerySuggest); - assertSuggestionSize(searchSuggest, 0, 2, "title"); - - // collate suggest with no result (boundary case) - searchSuggest = searchSuggest("Elections of Representatives Parliament", "title", filteredQuerySuggest); - assertSuggestionSize(searchSuggest, 0, 0, "title"); - - NumShards numShards = getNumShards("test"); - - // collate suggest with bad query - String incorrectFilterString = XContentFactory.jsonBuilder() - .startObject() - .startObject("test") - .field("title", "{{suggestion}}") - .endObject() - .endObject() - .string(); - PhraseSuggestionBuilder incorrectFilteredSuggest = suggest.collateQuery(incorrectFilterString); - Map> namedSuggestion = new HashMap<>(); - namedSuggestion.put("my_title_suggestion", incorrectFilteredSuggest); - try { - searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion); - fail("Post query error has been swallowed"); - } catch(ElasticsearchException e) { - // expected 
- } - - // suggest with collation - String filterStringAsFilter = XContentFactory.jsonBuilder() - .startObject() - .startObject("match_phrase") - .field("title", "{{suggestion}}") - .endObject() - .endObject() - .string(); - - PhraseSuggestionBuilder filteredFilterSuggest = suggest.collateQuery(filterStringAsFilter); - searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", - filteredFilterSuggest); - assertSuggestionSize(searchSuggest, 0, 2, "title"); - - // collate suggest with bad query - String filterStr = XContentFactory.jsonBuilder() - .startObject() - .startObject("pprefix") - .field("title", "{{suggestion}}") - .endObject() - .endObject() - .string(); - - PhraseSuggestionBuilder in = suggest.collateQuery(filterStr); - try { - searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion); - fail("Post filter error has been swallowed"); - } catch(ElasticsearchException e) { - //expected - } - - // collate script failure due to no additional params - String collateWithParams = XContentFactory.jsonBuilder() - .startObject() - .startObject("{{query_type}}") - .field("{{query_field}}", "{{suggestion}}") - .endObject() - .endObject() - .string(); - - - PhraseSuggestionBuilder phraseSuggestWithNoParams = suggest.collateQuery(collateWithParams); - try { - searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion); - fail("Malformed query (lack of additional params) should fail"); - } catch (ElasticsearchException e) { - // expected - } - - // collate script with additional params - Map params = new HashMap<>(); - params.put("query_type", "match_phrase"); - params.put("query_field", "title"); - - PhraseSuggestionBuilder phraseSuggestWithParams = suggest.collateQuery(collateWithParams).collateParams(params); - searchSuggest = searchSuggest("united states house of representatives 
elections in washington 2006", "title", - phraseSuggestWithParams); - assertSuggestionSize(searchSuggest, 0, 2, "title"); - - // collate query request with prune set to true - PhraseSuggestionBuilder phraseSuggestWithParamsAndReturn = suggest.collateQuery(collateWithParams).collateParams(params) - .collatePrune(true); - searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", - phraseSuggestWithParamsAndReturn); - assertSuggestionSize(searchSuggest, 0, 10, "title"); - assertSuggestionPhraseCollateMatchExists(searchSuggest, "title", 2); - } - - protected Suggest searchSuggest(String name, SuggestionBuilder suggestion) { - return searchSuggest(null, name, suggestion); - } - - protected Suggest searchSuggest(String suggestText, String name, SuggestionBuilder suggestion) { - Map> map = new HashMap<>(); - map.put(name, suggestion); - return searchSuggest(suggestText, 0, map); - } - - protected Suggest searchSuggest(String suggestText, int expectShardsFailed, Map> suggestions) { - SearchRequestBuilder builder = client().prepareSearch().setSize(0); - SuggestBuilder suggestBuilder = new SuggestBuilder(); - if (suggestText != null) { - suggestBuilder.setGlobalText(suggestText); - } - for (Entry> suggestion : suggestions.entrySet()) { - suggestBuilder.addSuggestion(suggestion.getKey(), suggestion.getValue()); - } - builder.suggest(suggestBuilder); - SearchResponse actionGet = builder.execute().actionGet(); - assertThat(Arrays.toString(actionGet.getShardFailures()), actionGet.getFailedShards(), equalTo(expectShardsFailed)); - return actionGet.getSuggest(); - } -} diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java deleted file mode 100644 index 9c09e9245f6..00000000000 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java 
+++ /dev/null @@ -1,225 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.messy.tests; - -import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.Accountable; -import org.elasticsearch.Version; -import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Injector; -import org.elasticsearch.common.inject.ModulesBuilder; -import org.elasticsearch.common.inject.multibindings.Multibinder; -import org.elasticsearch.common.inject.util.Providers; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.env.Environment; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexSettings; -import 
org.elasticsearch.index.analysis.AnalysisService; -import org.elasticsearch.index.cache.bitset.BitsetFilterCache; -import org.elasticsearch.index.fielddata.IndexFieldDataCache; -import org.elasticsearch.index.fielddata.IndexFieldDataService; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.script.mustache.TemplateQueryBuilder; -import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.similarity.SimilarityService; -import org.elasticsearch.indices.IndicesModule; -import org.elasticsearch.indices.breaker.CircuitBreakerService; -import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; -import org.elasticsearch.indices.mapper.MapperRegistry; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.script.ScriptModule; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; -import org.elasticsearch.search.SearchModule; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.IndexSettingsModule; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.threadpool.ThreadPool; -import org.junit.After; -import org.junit.Before; - -import java.io.IOException; -import java.lang.reflect.Proxy; -import java.util.Collections; -import java.util.List; -import java.util.function.Supplier; - -import static java.util.Collections.emptyList; -import static java.util.Collections.singletonList; -import static org.hamcrest.Matchers.containsString; - -/** - * Test parsing and executing a template request. 
- */ -// NOTE: this can't be migrated to ESSingleNodeTestCase because of the custom path.conf -public class TemplateQueryParserTests extends ESTestCase { - - private Injector injector; - private Supplier contextFactory; - - @Before - public void setup() throws IOException { - Settings settings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(Environment.PATH_CONF_SETTING.getKey(), this.getDataPath("config")) - .put("node.name", getClass().getName()) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false) - .build(); - Environment environment = new Environment(settings); - final Client proxy = (Client) Proxy.newProxyInstance( - Client.class.getClassLoader(), - new Class[]{Client.class}, (proxy1, method, args) -> { - throw new UnsupportedOperationException("client is just a dummy"); - }); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); - Index index = idxSettings.getIndex(); - // TODO: make this use a mock engine instead of mustache and it will no longer be messy! 
- ScriptModule scriptModule = new ScriptModule(settings, environment, null, singletonList(new MustacheScriptEngineService(settings)), - emptyList()); - List> scriptSettings = scriptModule.getSettings(); - scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED); - SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, Collections.emptyList()); - injector = new ModulesBuilder().add( - (b) -> { - b.bind(ThreadPool.class).toInstance(new ThreadPool(settings)); - b.bind(Client.class).toInstance(proxy); // not needed here - Multibinder.newSetBinder(b, ScoreFunctionParser.class); - b.bind(ClusterService.class).toProvider(Providers.of((ClusterService) null)); - b.bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class); - }, - settingsModule, - new SearchModule(settings, new NamedWriteableRegistry(), false, emptyList()) { - @Override - protected void configureSearch() { - // skip so we don't need transport - } - }, - new IndexSettingsModule(index, settings) - ).createInjector(); - - AnalysisService analysisService = createAnalysisService(idxSettings, settings); - SimilarityService similarityService = new SimilarityService(idxSettings, Collections.emptyMap()); - MapperRegistry mapperRegistry = new IndicesModule(new NamedWriteableRegistry(), Collections.emptyList()).getMapperRegistry(); - MapperService mapperService = new MapperService(idxSettings, analysisService, similarityService, mapperRegistry, () -> - contextFactory.get()); - IndicesFieldDataCache cache = new IndicesFieldDataCache(settings, new IndexFieldDataCache.Listener() {}); - IndexFieldDataService indexFieldDataService = new IndexFieldDataService(idxSettings, cache, injector.getInstance(CircuitBreakerService.class), mapperService); - BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, new BitsetFilterCache.Listener() { - @Override - public void onCache(ShardId shardId, Accountable accountable) {} - @Override - public void onRemoval(ShardId shardId, 
Accountable accountable) {} - }); - IndicesQueriesRegistry indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class); - contextFactory = () -> new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService, mapperService, - similarityService, scriptModule.getScriptService(), indicesQueriesRegistry, proxy, null, null); - } - - @Override - @After - public void tearDown() throws Exception { - super.tearDown(); - terminate(injector.getInstance(ThreadPool.class)); - } - - public void testParser() throws IOException { - String templateString = "{" + "\"inline\":{\"match_{{template}}\": {}}," + "\"params\":{\"template\":\"all\"}" + "}"; - - XContentParser templateSourceParser = XContentFactory.xContent(templateString).createParser(templateString); - QueryShardContext context = contextFactory.get(); - templateSourceParser.nextToken(); - - Query query = QueryBuilder.rewriteQuery(TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)).get(), - context).toQuery(context); - assertTrue("Parsing template query failed.", query instanceof MatchAllDocsQuery); - } - - public void testParseTemplateAsSingleStringWithConditionalClause() throws IOException { - String templateString = "{" + " \"inline\" : \"{ \\\"match_{{#use_it}}{{template}}{{/use_it}}\\\":{} }\"," + " \"params\":{" - + " \"template\":\"all\"," + " \"use_it\": true" + " }" + "}"; - XContentParser templateSourceParser = XContentFactory.xContent(templateString).createParser(templateString); - QueryShardContext context = contextFactory.get(); - - Query query = QueryBuilder - .rewriteQuery(TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)).get(), context) - .toQuery(context); - assertTrue("Parsing template query failed.", query instanceof MatchAllDocsQuery); - } - - /** - * Test that the template query parser can parse and evaluate template - * expressed as a single string but still it expects only the query - * specification (thus this 
test should fail with specific exception). - */ - public void testParseTemplateFailsToParseCompleteQueryAsSingleString() throws IOException { - String templateString = "{" + " \"inline\" : \"{ \\\"size\\\": \\\"{{size}}\\\", \\\"query\\\":{\\\"match_all\\\":{}}}\"," - + " \"params\":{" + " \"size\":2" + " }\n" + "}"; - - XContentParser templateSourceParser = XContentFactory.xContent(templateString).createParser(templateString); - QueryShardContext context = contextFactory.get(); - - try { - TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)).get().rewrite(context); - fail("Expected ParsingException"); - } catch (ParsingException e) { - assertThat(e.getMessage(), containsString("query malformed, no field after start_object")); - } - } - - public void testParserCanExtractTemplateNames() throws Exception { - String templateString = "{ \"file\": \"storedTemplate\" ,\"params\":{\"template\":\"all\" } } "; - - XContentParser templateSourceParser = XContentFactory.xContent(templateString).createParser(templateString); - QueryShardContext context = contextFactory.get(); - templateSourceParser.nextToken(); - - - Query query = QueryBuilder.rewriteQuery(TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)).get(), - context).toQuery(context); - assertTrue("Parsing template query failed.", query instanceof MatchAllDocsQuery); - } - - public void testMustRewrite() throws Exception { - String templateString = "{ \"file\": \"storedTemplate\" ,\"params\":{\"template\":\"all\" } } "; - - XContentParser templateSourceParser = XContentFactory.xContent(templateString).createParser(templateString); - QueryShardContext context = contextFactory.get(); - templateSourceParser.nextToken(); - try { - TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)).get().toQuery(context); - fail(); - } catch (UnsupportedOperationException ex) { - assertEquals("this query must be rewritten first", ex.getMessage()); - } - } 
-} diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/package-info.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/package-info.java deleted file mode 100644 index a2325b2d511..00000000000 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/package-info.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -/** - * This package contains tests that use mustache to test what looks - * to be unrelated functionality, or functionality that should be - * tested with a mock instead. Instead of doing an epic battle - * with these tests, they are temporarily moved here to the mustache - * module's tests, but that is likely not where they belong. Please - * help by cleaning them up and we can remove this package! - * - *
    - *
  • If the test is actually testing mustache specifically, move to - * the org.elasticsearch.script.mustache tests package of this module
  • - *
  • If the test is testing templating integration with another core subsystem, - * fix it to use a mock instead, so it can be in the core tests again
  • - *
  • If the test is just being lazy, and does not really need templating to test - * something, clean it up!
  • - *
- */ -/* List of renames that took place: -renamed: core/src/test/java/org/elasticsearch/validate/RenderSearchTemplateIT.java -> modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java -renamed: core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java -> modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java -renamed: core/src/test/java/org/elasticsearch/index/query/TemplateQueryParserTests.java -> modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java -renamed: core/src/test/java/org/elasticsearch/index/query/TemplateQueryIT.java -> modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java -renamed: core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java -> module/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java - ^^^^^ note: just the methods from this test using mustache were moved here, the others use groovy and are in the groovy module under its messy tests package. 
-renamed: rest-api-spec/test/msearch/10_basic.yaml -> module/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_messy_test_msearch.yaml - */ - -package org.elasticsearch.messy.tests; diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java index bdba0346a85..6c85468ea4a 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java @@ -19,9 +19,18 @@ package org.elasticsearch.script.mustache; import com.github.mustachejava.MustacheFactory; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.script.CompiledScript; +import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -72,6 +81,27 @@ public class MustacheScriptEngineTests extends ESTestCase { } } + public void testSimple() throws IOException { + String templateString = "{" + "\"inline\":{\"match_{{template}}\": {}}," + "\"params\":{\"template\":\"all\"}" + "}"; + XContentParser parser = XContentFactory.xContent(templateString).createParser(templateString); + Script script = Script.parse(parser, new ParseFieldMatcher(false)); + CompiledScript compiledScript = new 
CompiledScript(ScriptService.ScriptType.INLINE, null, "mustache", + qe.compile(null, script.getScript(), Collections.emptyMap())); + ExecutableScript executableScript = qe.executable(compiledScript, script.getParams()); + assertThat(((BytesReference) executableScript.run()).utf8ToString(), equalTo("{\"match_all\":{}}")); + } + + public void testParseTemplateAsSingleStringWithConditionalClause() throws IOException { + String templateString = "{" + " \"inline\" : \"{ \\\"match_{{#use_it}}{{template}}{{/use_it}}\\\":{} }\"," + " \"params\":{" + + " \"template\":\"all\"," + " \"use_it\": true" + " }" + "}"; + XContentParser parser = XContentFactory.xContent(templateString).createParser(templateString); + Script script = Script.parse(parser, new ParseFieldMatcher(false)); + CompiledScript compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, null, "mustache", + qe.compile(null, script.getScript(), Collections.emptyMap())); + ExecutableScript executableScript = qe.executable(compiledScript, script.getParams()); + assertThat(((BytesReference) executableScript.run()).utf8ToString(), equalTo("{ \"match_all\":{} }")); + } + public void testEscapeJson() throws IOException { { StringWriter writer = new StringWriter(); diff --git a/modules/lang-mustache/src/test/resources/org/elasticsearch/messy/tests/config/scripts/full-query-template.mustache b/modules/lang-mustache/src/test/resources/org/elasticsearch/messy/tests/config/scripts/full-query-template.mustache deleted file mode 100644 index 519141472f0..00000000000 --- a/modules/lang-mustache/src/test/resources/org/elasticsearch/messy/tests/config/scripts/full-query-template.mustache +++ /dev/null @@ -1,6 +0,0 @@ -{ - "query": { - "match": { "{{myField}}" : "{{myValue}}" } - }, - "size" : {{mySize}} -} diff --git a/modules/lang-mustache/src/test/resources/org/elasticsearch/messy/tests/config/scripts/storedTemplate.mustache 
b/modules/lang-mustache/src/test/resources/org/elasticsearch/messy/tests/config/scripts/storedTemplate.mustache deleted file mode 100644 index a779da7c467..00000000000 --- a/modules/lang-mustache/src/test/resources/org/elasticsearch/messy/tests/config/scripts/storedTemplate.mustache +++ /dev/null @@ -1,3 +0,0 @@ -{ - "match_{{template}}": {} -} diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/30_render_search_template.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/20_render_search_template.yaml similarity index 72% rename from modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/30_render_search_template.yaml rename to modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/20_render_search_template.yaml index 658d5ed7a90..9a9a1b0c017 100644 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/30_render_search_template.yaml +++ b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/20_render_search_template.yaml @@ -1,5 +1,5 @@ --- -"Indexed Template validate tests": +"Stored Template validate tests": - do: put_template: @@ -29,6 +29,23 @@ - match: { template_output.query.match.text: "bar" } - match: { template_output.aggs.my_terms.terms.field: "field1" } +--- +"File Template validate tests": + + - do: + render_search_template: + body: { "file": "file_search_template", "params": { "my_value": "foo", "my_field": "field1" } } + + - match: { template_output.query.match.text: "foo" } + - match: { template_output.aggs.my_terms.terms.field: "field1" } + + - do: + render_search_template: + body: { "file": "file_search_template", "params": { "my_value": "bar", "my_field": "my_other_field" } } + + - match: { template_output.query.match.text: "bar" } + - match: { template_output.aggs.my_terms.terms.field: "my_other_field" } + --- "Inline Template validate tests": @@ -51,7 +68,7 @@ render_search_template: body: { 
"inline": { "query": { "match": { "text": "{{{my_value}}" } }, "aggs": { "my_terms": { "terms": { "field": "{{my_field}}" } } } }, "params": { "my_value": "bar", "my_field": "field1" } } --- -"Escaped Indexed Template validate tests": +"Escaped Stored Template validate tests": - do: put_template: @@ -102,3 +119,38 @@ catch: /Improperly.closed.variable.in.query-template/ render_search_template: body: { "inline": "{ \"query\": { \"match\": { \"text\": \"{{{my_value}}\" } }, \"size\": {{my_size}} }", "params": { "my_value": "bar", "my_size": 100 } } + +--- +"Indexed Template query tests": + + - do: + index: + index: test + type: testtype + id: 1 + body: { "text": "value1_foo" } + - do: + index: + index: test + type: testtype + id: 2 + body: { "text": "value2_foo value3_foo" } + - do: + indices.refresh: {} + + - do: + put_template: + id: "1" + body: { "template": { "query": { "match" : { "text": "{{my_value}}" } }, "size": "{{my_size}}" } } + - match: { acknowledged: true } + + - do: + search_template: + body: { "id" : "1", "params" : { "my_value" : "value1_foo", "my_size" : 1 } } + - match: { hits.total: 1 } + + - do: + catch: /Unable.to.find.on.disk.file.script.\[simple1\].using.lang.\[mustache\]/ + search_template: + body: { "file" : "simple1"} + diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/20_search.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/20_search.yaml deleted file mode 100644 index 4c38dceb14d..00000000000 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/20_search.yaml +++ /dev/null @@ -1,34 +0,0 @@ ---- -"Indexed Template query tests": - - - do: - index: - index: test - type: testtype - id: 1 - body: { "text": "value1_foo" } - - do: - index: - index: test - type: testtype - id: 2 - body: { "text": "value2_foo value3_foo" } - - do: - indices.refresh: {} - - - do: - put_template: - id: "1" - body: { "template": { "query": { "match" : { "text": 
"{{my_value}}" } }, "size": "{{my_size}}" } } - - match: { acknowledged: true } - - - do: - search_template: - body: { "id" : "1", "params" : { "my_value" : "value1_foo", "my_size" : 1 } } - - match: { hits.total: 1 } - - - do: - catch: /Unable.to.find.on.disk.file.script.\[simple1\].using.lang.\[mustache\]/ - search_template: - body: { "file" : "simple1"} - diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/30_search_template.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/30_search_template.yaml new file mode 100644 index 00000000000..c7e4c91b4cc --- /dev/null +++ b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/30_search_template.yaml @@ -0,0 +1,108 @@ +--- +"Template search request": + + - do: + index: + index: test + type: testtype + id: 1 + body: { "text": "value1" } + - do: + index: + index: test + type: testtype + id: 2 + body: { "text": "value2" } + - do: + indices.refresh: {} + + - do: + search_template: + body: { "inline" : { "query": { "term": { "text": { "value": "{{template}}" } } } }, "params": { "template": "value1" } } + + - match: { hits.total: 1 } + + - do: + search_template: + body: { "file" : "file_search_template", "params": { "my_value": "value1", "my_field" : "_type" } } + + - match: { hits.total: 1 } + + - do: + put_template: + id: "1" + body: { "template": { "query": { "term": { "text": "{{template}}" } } } } + - match: { acknowledged: true } + + - do: + search_template: + body: { "id" : "1", "params": { "template": "value1" } } + - match: { hits.total: 1 } + + - do: + search_template: + body: { "inline" : { "query": { "match_{{template}}": {} } }, "params" : { "template" : "all" } } + + - match: { hits.total: 2 } + +--- +"Missing template search request": + + - do: + catch: missing + search_template: + body: { "id" : "unknown", "params": { "template": "value1" } } + +--- +"Test that parameters can be used": + + - do: + index: + index: test + 
type: type + id: 1 + body: { "theField": "foo" } + + - do: + index: + index: test + type: type + id: 2 + body: { "theField": "foo 2" } + + - do: + index: + index: test + type: type + id: 3 + body: { "theField": "foo 3" } + + - do: + index: + index: test + type: type + id: 4 + body: { "theField": "foo 4" } + + - do: + index: + index: test + type: type + id: 5 + body: { "otherField": "foo" } + - do: + indices.refresh: {} + + - do: + search_template: + body: { "file" : "template_1", "params": { "size": "2", "field": "theField", "value": "foo" } } + + - match: { hits.total: 4 } + - length: { hits.hits: 2 } + + - do: + search_template: + body: { "file" : "template_1", "params": { "size": "2", "field": "otherField", "value": "foo" } } + + - match: { hits.total: 1 } + - length: { hits.hits: 1 } diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/40_search_request_template.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/40_search_request_template.yaml deleted file mode 100644 index 8d3154cc8d4..00000000000 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/40_search_request_template.yaml +++ /dev/null @@ -1,38 +0,0 @@ ---- -"Template search request": - - - do: - index: - index: test - type: testtype - id: 1 - body: { "text": "value1" } - - do: - index: - index: test - type: testtype - id: 2 - body: { "text": "value2" } - - do: - indices.refresh: {} - - - do: - search_template: - body: { "inline" : { "query": { "term": { "text": { "value": "{{template}}" } } } }, "params": { "template": "value1" } } - - - match: { hits.total: 1 } - - - do: - search_template: - body: { "inline" : { "query": { "match_{{template}}": {} } }, "params" : { "template" : "all" } } - - - match: { hits.total: 2 } - ---- -"Missing template search request": - - - do: - catch: missing - search_template: - body: { "id" : "unknown", "params": { "template": "value1" } } - diff --git 
a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/30_template_query_execution.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/40_template_query.yaml similarity index 62% rename from modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/30_template_query_execution.yaml rename to modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/40_template_query.yaml index 62fca6a8ebd..cf3e6883e45 100644 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/30_template_query_execution.yaml +++ b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/40_template_query.yaml @@ -16,12 +16,36 @@ - do: indices.refresh: {} + - do: + put_template: + id: "1" + body: { "template": { "match": { "text": "{{my_value}}" } } } + - match: { acknowledged: true } + - do: search: body: { "query": { "template": { "inline": { "term": { "text": { "value": "{{template}}" } } }, "params": { "template": "value1" } } } } - match: { hits.total: 1 } + - do: + search: + body: { "query": { "template": { "file": "file_query_template", "params": { "my_value": "value1" } } } } + + - match: { hits.total: 1 } + + - do: + search: + body: { "query": { "template": { "id": "1", "params": { "my_value": "value1" } } } } + + - match: { hits.total: 1 } + + - do: + search: + body: { "query": { "template": { "id": "/mustache/1", "params": { "my_value": "value1" } } } } + + - match: { hits.total: 1 } + - do: search: body: { "query": { "template": { "inline": {"match_{{template}}": {}}, "params" : { "template" : "all" } } } } @@ -40,9 +64,13 @@ - match: { hits.total: 2 } + - do: + search: + body: { "query": { "template": { "inline": "{\"match_all\": {}}", "params" : {} } } } + + - match: { hits.total: 2 } + - do: search: body: { "query": { "template": { "inline": "{\"query_string\": { \"query\" : \"{{query}}\" }}", "params" : { "query" : "text:\"value2 value3\"" } } } } - - - match: 
{ hits.total: 1 } diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_messy_test_msearch.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_messy_test_msearch.yaml deleted file mode 100644 index 24d1ec1efb6..00000000000 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_messy_test_msearch.yaml +++ /dev/null @@ -1,33 +0,0 @@ ---- -"Basic multi-search with template query": - - do: - index: - index: test_1 - type: test - id: 1 - body: { foo: bar } - - - do: - index: - index: test_1 - type: test - id: 2 - body: { foo: baz } - - - do: - index: - index: test_1 - type: test - id: 3 - body: { foo: foo } - - - do: - indices.refresh: {} - - - do: - msearch: - body: - - index: test_1 - - query: { "template": { "inline": { "term": { "foo": { "value": "{{template}}" } } }, "params": { "template": "bar" } } } - - match: { responses.0.hits.total: 1 } - diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/10_basic.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yaml similarity index 64% rename from modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/10_basic.yaml rename to modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yaml index ab42133a2b2..a22b101bf67 100644 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/10_basic.yaml +++ b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yaml @@ -124,3 +124,74 @@ setup: - inline: '{"query": {"match": {"foo": "{{value}}"} } }' params: value: "bar" + +--- +"Basic multi-search using stored template": + + - do: + put_template: + id: stored_template_1 + body: {"template": {"query": {"match": {"{{field}}": "{{value}}" }}}} + - match: { acknowledged: true 
} + + - do: + msearch_template: + body: + - index: index_* + - id: stored_template_1 + params: + field: "foo" + value: "foo" + - index: _all + - id: stored_template_1 + params: + field: "foo" + value: "bar" + - index: index_2 + - id: stored_template_1 + params: + field: "foo" + value: "foo" + + - match: { responses.0.hits.total: 2 } + - match: { responses.1.hits.total: 1 } + - match: { responses.2.hits.total: 1 } + +--- +"Basic multi-search using file template": + + - do: + render_search_template: + body: { "file": "template_1", "params": { "field": "foo", "value": "bar", "size": 20 } } + + - match: { template_output.query.match.foo.query: "bar" } + - match: { template_output.query.match.foo.operator: "or" } + - match: { template_output.size: 20 } + + - do: + msearch_template: + body: + - index: index_* + - file: template_1 + params: + field: "foo" + value: "foo" + size: 10 + - index: _all + - file: template_1 + params: + field: "foo" + value: "bar" + operator: "and" + size: 50 + - index: index_2 + - file: template_1 + params: + field: "foo" + value: "foo" + size: 0 + + - match: { responses.0.hits.total: 2 } + - match: { responses.1.hits.total: 1 } + - match: { responses.2.hits.total: 1 } + diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/20_stored_template.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/20_stored_template.yaml deleted file mode 100644 index ffbe9200375..00000000000 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/20_stored_template.yaml +++ /dev/null @@ -1,66 +0,0 @@ ---- -setup: - - - do: - index: - index: index_1 - type: test - id: 1 - body: { foo: bar } - - - do: - index: - index: index_1 - type: test - id: 2 - body: { foo: baz } - - - do: - index: - index: index_1 - type: test - id: 3 - body: { foo: foo } - - - do: - index: - index: index_2 - type: test - id: 1 - body: { foo: foo } - - - do: - 
indices.refresh: {} - ---- -"Basic multi-search using stored template": - - - do: - put_template: - id: stored_template_1 - body: {"template": {"query": {"match": {"{{field}}": "{{value}}" }}}} - - match: { acknowledged: true } - - - do: - msearch_template: - body: - - index: index_* - - id: stored_template_1 - params: - field: "foo" - value: "foo" - - index: _all - - id: stored_template_1 - params: - field: "foo" - value: "bar" - - index: index_2 - - id: stored_template_1 - params: - field: "foo" - value: "foo" - - - match: { responses.0.hits.total: 2 } - - match: { responses.1.hits.total: 1 } - - match: { responses.2.hits.total: 1 } - diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/30_file_template.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/30_file_template.yaml deleted file mode 100644 index a5d11898baf..00000000000 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/30_file_template.yaml +++ /dev/null @@ -1,72 +0,0 @@ ---- -setup: - - - do: - index: - index: index_1 - type: test - id: 1 - body: { foo: bar } - - - do: - index: - index: index_1 - type: test - id: 2 - body: { foo: baz } - - - do: - index: - index: index_1 - type: test - id: 3 - body: { foo: foo } - - - do: - index: - index: index_2 - type: test - id: 1 - body: { foo: foo } - - - do: - indices.refresh: {} - ---- -"Basic multi-search using file template": - - - do: - render_search_template: - body: { "file": "template_1", "params": { "field": "foo", "value": "bar", "size": 20 } } - - - match: { template_output.query.match.foo.query: "bar" } - - match: { template_output.query.match.foo.operator: "or" } - - match: { template_output.size: 20 } - - - do: - msearch_template: - body: - - index: index_* - - file: template_1 - params: - field: "foo" - value: "foo" - size: 10 - - index: _all - - file: template_1 - params: - field: "foo" - value: "bar" - 
operator: "and" - size: 50 - - index: index_2 - - file: template_1 - params: - field: "foo" - value: "foo" - size: 0 - - - match: { responses.0.hits.total: 2 } - - match: { responses.1.hits.total: 1 } - - match: { responses.2.hits.total: 1 } - diff --git a/modules/lang-mustache/src/test/resources/templates/file_query_template.mustache b/modules/lang-mustache/src/test/resources/templates/file_query_template.mustache new file mode 100644 index 00000000000..f083a132133 --- /dev/null +++ b/modules/lang-mustache/src/test/resources/templates/file_query_template.mustache @@ -0,0 +1,5 @@ +{ + "match": { + "text": "{{my_value}}" + } +} diff --git a/modules/lang-mustache/src/test/resources/templates/file_search_template.mustache b/modules/lang-mustache/src/test/resources/templates/file_search_template.mustache new file mode 100644 index 00000000000..beaf32ac0f1 --- /dev/null +++ b/modules/lang-mustache/src/test/resources/templates/file_search_template.mustache @@ -0,0 +1,12 @@ +{ + "query": { + "match": { + "text": "{{my_value}}" + } + }, + "aggs": { + "my_terms": { + "terms": { "field": "{{my_field}}" } + } + } +} diff --git a/modules/lang-mustache/src/test/resources/org/elasticsearch/messy/tests/config/scripts/template_1.mustache b/modules/lang-mustache/src/test/resources/templates/template_1.mustache similarity index 99% rename from modules/lang-mustache/src/test/resources/org/elasticsearch/messy/tests/config/scripts/template_1.mustache rename to modules/lang-mustache/src/test/resources/templates/template_1.mustache index 4b1c02f3b24..29174be2ff0 100644 --- a/modules/lang-mustache/src/test/resources/org/elasticsearch/messy/tests/config/scripts/template_1.mustache +++ b/modules/lang-mustache/src/test/resources/templates/template_1.mustache @@ -8,4 +8,4 @@ } }, "size": {{size}} -} \ No newline at end of file +} From a9ab095b8cd176bc8753dce969af68e340cfe3bd Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 25 Jul 2016 18:50:12 +0200 Subject: [PATCH 54/93] Moved 
all mustache classes into one package. No need for multiple packages inside a small module. --- .../src/main/resources/checkstyle_suppressions.xml | 7 ------- .../mustache}/MultiSearchTemplateAction.java | 2 +- .../mustache}/MultiSearchTemplateRequest.java | 2 +- .../mustache}/MultiSearchTemplateRequestBuilder.java | 2 +- .../mustache}/MultiSearchTemplateResponse.java | 2 +- .../elasticsearch/script/mustache/MustachePlugin.java | 10 ---------- .../mustache}/RestDeleteSearchTemplateAction.java | 2 +- .../mustache}/RestGetSearchTemplateAction.java | 2 +- .../mustache}/RestMultiSearchTemplateAction.java | 5 +---- .../mustache}/RestPutSearchTemplateAction.java | 2 +- .../mustache}/RestRenderSearchTemplateAction.java | 4 +--- .../mustache}/RestSearchTemplateAction.java | 4 +--- .../mustache}/SearchTemplateAction.java | 3 +-- .../mustache}/SearchTemplateRequest.java | 2 +- .../mustache}/SearchTemplateRequestBuilder.java | 2 +- .../mustache}/SearchTemplateResponse.java | 2 +- .../mustache}/TransportMultiSearchTemplateAction.java | 2 +- .../mustache}/TransportSearchTemplateAction.java | 3 +-- .../mustache}/MultiSearchTemplateIT.java | 3 +-- .../mustache}/MultiSearchTemplateRequestTests.java | 5 ++--- .../script/mustache/MustacheScriptEngineTests.java | 10 ++++------ .../elasticsearch/script/mustache/MustacheTests.java | 11 ++++++----- .../mustache}/SearchTemplateIT.java | 6 +----- .../mustache}/SearchTemplateRequestTests.java | 3 +-- .../mustache}/simple-msearch-template.json | 0 25 files changed, 31 insertions(+), 65 deletions(-) rename modules/lang-mustache/src/main/java/org/elasticsearch/{action/search/template => script/mustache}/MultiSearchTemplateAction.java (96%) rename modules/lang-mustache/src/main/java/org/elasticsearch/{action/search/template => script/mustache}/MultiSearchTemplateRequest.java (98%) rename modules/lang-mustache/src/main/java/org/elasticsearch/{action/search/template => script/mustache}/MultiSearchTemplateRequestBuilder.java (98%) rename 
modules/lang-mustache/src/main/java/org/elasticsearch/{action/search/template => script/mustache}/MultiSearchTemplateResponse.java (99%) rename modules/lang-mustache/src/main/java/org/elasticsearch/{rest/action/search/template => script/mustache}/RestDeleteSearchTemplateAction.java (96%) rename modules/lang-mustache/src/main/java/org/elasticsearch/{rest/action/search/template => script/mustache}/RestGetSearchTemplateAction.java (96%) rename modules/lang-mustache/src/main/java/org/elasticsearch/{rest/action/search/template => script/mustache}/RestMultiSearchTemplateAction.java (93%) rename modules/lang-mustache/src/main/java/org/elasticsearch/{rest/action/search/template => script/mustache}/RestPutSearchTemplateAction.java (96%) rename modules/lang-mustache/src/main/java/org/elasticsearch/{rest/action/search/template => script/mustache}/RestRenderSearchTemplateAction.java (92%) rename modules/lang-mustache/src/main/java/org/elasticsearch/{rest/action/search/template => script/mustache}/RestSearchTemplateAction.java (96%) rename modules/lang-mustache/src/main/java/org/elasticsearch/{action/search/template => script/mustache}/SearchTemplateAction.java (93%) rename modules/lang-mustache/src/main/java/org/elasticsearch/{action/search/template => script/mustache}/SearchTemplateRequest.java (99%) rename modules/lang-mustache/src/main/java/org/elasticsearch/{action/search/template => script/mustache}/SearchTemplateRequestBuilder.java (97%) rename modules/lang-mustache/src/main/java/org/elasticsearch/{action/search/template => script/mustache}/SearchTemplateResponse.java (98%) rename modules/lang-mustache/src/main/java/org/elasticsearch/{action/search/template => script/mustache}/TransportMultiSearchTemplateAction.java (98%) rename modules/lang-mustache/src/main/java/org/elasticsearch/{action/search/template => script/mustache}/TransportSearchTemplateAction.java (97%) rename modules/lang-mustache/src/test/java/org/elasticsearch/{action/search/template => 
script/mustache}/MultiSearchTemplateIT.java (98%) rename modules/lang-mustache/src/test/java/org/elasticsearch/{action/search/template => script/mustache}/MultiSearchTemplateRequestTests.java (95%) rename modules/lang-mustache/src/test/java/org/elasticsearch/{action/search/template => script/mustache}/SearchTemplateIT.java (98%) rename modules/lang-mustache/src/test/java/org/elasticsearch/{action/search/template => script/mustache}/SearchTemplateRequestTests.java (98%) rename modules/lang-mustache/src/test/resources/org/elasticsearch/{action/search/template => script/mustache}/simple-msearch-template.json (100%) diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index b629abbe493..0203ab9de76 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -1115,13 +1115,6 @@ - - - - - - - diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java similarity index 96% rename from modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateAction.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java index f5ab172f8ff..99eb9709f73 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.action.Action; import org.elasticsearch.client.ElasticsearchClient; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateRequest.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java similarity index 98% rename from modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateRequest.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java index 6fce2b2d492..b1db44defa0 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateRequest.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateRequestBuilder.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestBuilder.java similarity index 98% rename from modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateRequestBuilder.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestBuilder.java index 5b0f802ab83..4624e8caa24 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateRequestBuilder.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestBuilder.java @@ -17,7 +17,7 @@ * under the 
License. */ -package org.elasticsearch.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.support.IndicesOptions; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java similarity index 99% rename from modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateResponse.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java index f9e99ffc7e3..6201e9305ef 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionResponse; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java index 5d7b3b4fb56..170070564f9 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java @@ -21,22 +21,12 @@ package org.elasticsearch.script.mustache; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.search.template.MultiSearchTemplateAction; -import org.elasticsearch.action.search.template.SearchTemplateAction; -import org.elasticsearch.action.search.template.TransportMultiSearchTemplateAction; -import org.elasticsearch.action.search.template.TransportSearchTemplateAction; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.rest.action.search.template.RestDeleteSearchTemplateAction; -import org.elasticsearch.rest.action.search.template.RestGetSearchTemplateAction; -import org.elasticsearch.rest.action.search.template.RestMultiSearchTemplateAction; -import org.elasticsearch.rest.action.search.template.RestPutSearchTemplateAction; -import org.elasticsearch.rest.action.search.template.RestRenderSearchTemplateAction; -import org.elasticsearch.rest.action.search.template.RestSearchTemplateAction; import org.elasticsearch.script.ScriptEngineService; import java.util.Arrays; diff --git 
a/modules/lang-mustache/src/main/java/org/elasticsearch/rest/action/search/template/RestDeleteSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestDeleteSearchTemplateAction.java similarity index 96% rename from modules/lang-mustache/src/main/java/org/elasticsearch/rest/action/search/template/RestDeleteSearchTemplateAction.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestDeleteSearchTemplateAction.java index 075e6431b4b..ed4506686d8 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/rest/action/search/template/RestDeleteSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestDeleteSearchTemplateAction.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.rest.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/rest/action/search/template/RestGetSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestGetSearchTemplateAction.java similarity index 96% rename from modules/lang-mustache/src/main/java/org/elasticsearch/rest/action/search/template/RestGetSearchTemplateAction.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestGetSearchTemplateAction.java index b2c44e10a48..47ab373b261 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/rest/action/search/template/RestGetSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestGetSearchTemplateAction.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.rest.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/rest/action/search/template/RestMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java similarity index 93% rename from modules/lang-mustache/src/main/java/org/elasticsearch/rest/action/search/template/RestMultiSearchTemplateAction.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java index add4719ad93..c0ed4b59ef6 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/rest/action/search/template/RestMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java @@ -17,13 +17,10 @@ * under the License. 
*/ -package org.elasticsearch.rest.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.action.search.template.MultiSearchTemplateAction; -import org.elasticsearch.action.search.template.MultiSearchTemplateRequest; -import org.elasticsearch.action.search.template.SearchTemplateRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/rest/action/search/template/RestPutSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestPutSearchTemplateAction.java similarity index 96% rename from modules/lang-mustache/src/main/java/org/elasticsearch/rest/action/search/template/RestPutSearchTemplateAction.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestPutSearchTemplateAction.java index cca88fd4ecd..71530eb4409 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/rest/action/search/template/RestPutSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestPutSearchTemplateAction.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.rest.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/rest/action/search/template/RestRenderSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestRenderSearchTemplateAction.java similarity index 92% rename from modules/lang-mustache/src/main/java/org/elasticsearch/rest/action/search/template/RestRenderSearchTemplateAction.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestRenderSearchTemplateAction.java index 5e7e75e9c81..33c8a17c742 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/rest/action/search/template/RestRenderSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestRenderSearchTemplateAction.java @@ -17,10 +17,8 @@ * under the License. 
*/ -package org.elasticsearch.rest.action.search.template; +package org.elasticsearch.script.mustache; -import org.elasticsearch.action.search.template.SearchTemplateAction; -import org.elasticsearch.action.search.template.SearchTemplateRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/rest/action/search/template/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java similarity index 96% rename from modules/lang-mustache/src/main/java/org/elasticsearch/rest/action/search/template/RestSearchTemplateAction.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java index e12137ee3c5..42cd3987d47 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/rest/action/search/template/RestSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java @@ -17,12 +17,10 @@ * under the License. 
*/ -package org.elasticsearch.rest.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.template.SearchTemplateAction; -import org.elasticsearch.action.search.template.SearchTemplateRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/SearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java similarity index 93% rename from modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/SearchTemplateAction.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java index a3501f51bb1..2982fbd70c6 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/SearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java @@ -17,10 +17,9 @@ * under the License. 
*/ -package org.elasticsearch.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.action.Action; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.ElasticsearchClient; public class SearchTemplateAction extends Action { diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/SearchTemplateRequest.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java similarity index 99% rename from modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/SearchTemplateRequest.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java index ff3d97401a3..a4a65336046 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/SearchTemplateRequest.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/SearchTemplateRequestBuilder.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequestBuilder.java similarity index 97% rename from modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/SearchTemplateRequestBuilder.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequestBuilder.java index 3d44535d80b..811c2523e06 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/SearchTemplateRequestBuilder.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequestBuilder.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.search.SearchRequest; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java similarity index 98% rename from modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/SearchTemplateResponse.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java index 86b12a0fe56..f4647b713de 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/SearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.search.SearchResponse; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/TransportMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java similarity index 98% rename from modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/TransportMultiSearchTemplateAction.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java index 642fe7648da..a613f15972b 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/TransportMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/TransportSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java similarity index 97% rename from modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/TransportSearchTemplateAction.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java index 0c55a63596a..ded396deb12 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/TransportSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; @@ -36,7 +36,6 @@ import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.search.aggregations.AggregatorParsers; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.suggest.Suggesters; diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/MultiSearchTemplateIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java similarity index 98% rename from 
modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/MultiSearchTemplateIT.java rename to modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java index 03d04e518dc..f5d1a9dd791 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/MultiSearchTemplateIT.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java @@ -17,14 +17,13 @@ * under the License. */ -package org.elasticsearch.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.script.mustache.MustachePlugin; import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/MultiSearchTemplateRequestTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestTests.java similarity index 95% rename from modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/MultiSearchTemplateRequestTests.java rename to modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestTests.java index 4724a758107..d7807c1a268 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/MultiSearchTemplateRequestTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestTests.java @@ -17,11 +17,10 @@ * under the License. 
*/ -package org.elasticsearch.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.search.template.RestMultiSearchTemplateAction; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; @@ -34,7 +33,7 @@ import static org.hamcrest.Matchers.nullValue; public class MultiSearchTemplateRequestTests extends ESTestCase { public void testParseRequest() throws Exception { - byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/template/simple-msearch-template.json"); + byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/script/mustache/simple-msearch-template.json"); RestRequest restRequest = new FakeRestRequest.Builder().withContent(new BytesArray(data)).build(); MultiSearchTemplateRequest request = RestMultiSearchTemplateAction.parseRequest(restRequest, true); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java index 6c85468ea4a..693ada174b9 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java @@ -19,15 +19,11 @@ package org.elasticsearch.script.mustache; import com.github.mustachejava.MustacheFactory; -import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.Query; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import 
org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; @@ -63,7 +59,8 @@ public class MustacheScriptEngineTests extends ESTestCase { + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}" + "}}, \"negative_boost\": {{boost_val}} } }}"; Map vars = new HashMap<>(); vars.put("boost_val", "0.3"); - BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(null, template, compileParams)), vars).run(); + BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", + qe.compile(null, template, compileParams)), vars).run(); assertEquals("GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}}}, \"negative_boost\": 0.3 } }}", o.utf8ToString()); @@ -74,7 +71,8 @@ public class MustacheScriptEngineTests extends ESTestCase { Map vars = new HashMap<>(); vars.put("boost_val", "0.3"); vars.put("body_val", "\"quick brown\""); - BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(null, template, compileParams)), vars).run(); + BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", + qe.compile(null, template, compileParams)), vars).run(); assertEquals("GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"\\\"quick brown\\\"\"}}}, \"negative_boost\": 0.3 } }}", o.utf8ToString()); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java 
b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java index 91098eb1c88..9b48afe834a 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java @@ -44,7 +44,6 @@ import static java.util.Collections.singletonMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.script.ScriptService.ScriptType.INLINE; import static org.elasticsearch.script.mustache.MustacheScriptEngineService.CONTENT_TYPE_PARAM; -import static org.elasticsearch.script.mustache.MustacheScriptEngineService.JSON_CONTENT_TYPE; import static org.elasticsearch.script.mustache.MustacheScriptEngineService.PLAIN_TEXT_CONTENT_TYPE; import static org.hamcrest.Matchers.both; import static org.hamcrest.Matchers.containsString; @@ -147,7 +146,6 @@ public class MustacheTests extends ESTestCase { public void testEscaping() { // json string escaping enabled: - Map params = randomBoolean() ? 
Collections.emptyMap() : Collections.singletonMap(CONTENT_TYPE_PARAM, JSON_CONTENT_TYPE); Mustache mustache = (Mustache) engine.compile(null, "{ \"field1\": \"{{value}}\"}", Collections.emptyMap()); CompiledScript compiledScript = new CompiledScript(INLINE, "name", "mustache", mustache); ExecutableScript executableScript = engine.executable(compiledScript, Collections.singletonMap("value", "a \"value\"")); @@ -156,7 +154,8 @@ public class MustacheTests extends ESTestCase { assertThat(result, equalTo("{ \"field1\": \"a \\\"value\\\"\"}")); // json string escaping disabled: - mustache = (Mustache) engine.compile(null, "{ \"field1\": \"{{value}}\"}", Collections.singletonMap(CONTENT_TYPE_PARAM, PLAIN_TEXT_CONTENT_TYPE)); + mustache = (Mustache) engine.compile(null, "{ \"field1\": \"{{value}}\"}", + Collections.singletonMap(CONTENT_TYPE_PARAM, PLAIN_TEXT_CONTENT_TYPE)); compiledScript = new CompiledScript(INLINE, "name", "mustache", mustache); executableScript = engine.executable(compiledScript, Collections.singletonMap("value", "a \"value\"")); rawResult = (BytesReference) executableScript.run(); @@ -236,10 +235,12 @@ public class MustacheTests extends ESTestCase { Map ctx = Collections.singletonMap("ctx", humans); assertScript("{{#toJson}}.{{/toJson}}", ctx, - equalTo("{\"ctx\":{\"first\":{\"name\":\"John Smith\",\"age\":42,\"height\":1.84},\"second\":{\"name\":\"Dave Smith\",\"age\":27,\"height\":1.71}}}")); + equalTo("{\"ctx\":{\"first\":{\"name\":\"John Smith\",\"age\":42,\"height\":1.84},\"second\":" + + "{\"name\":\"Dave Smith\",\"age\":27,\"height\":1.71}}}")); assertScript("{{#toJson}}ctx{{/toJson}}", ctx, - equalTo("{\"first\":{\"name\":\"John Smith\",\"age\":42,\"height\":1.84},\"second\":{\"name\":\"Dave Smith\",\"age\":27,\"height\":1.71}}")); + equalTo("{\"first\":{\"name\":\"John Smith\",\"age\":42,\"height\":1.84},\"second\":" + + "{\"name\":\"Dave Smith\",\"age\":27,\"height\":1.71}}")); assertScript("{{#toJson}}ctx.first{{/toJson}}", ctx, 
equalTo("{\"name\":\"John Smith\",\"age\":42,\"height\":1.84}")); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/SearchTemplateIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java similarity index 98% rename from modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/SearchTemplateIT.java rename to modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java index ca5ed3e82f4..c3656029bc4 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/SearchTemplateIT.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; @@ -25,12 +25,8 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.rest.action.search.template.RestSearchTemplateAction; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.mustache.MustachePlugin; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; -import org.elasticsearch.script.mustache.TemplateQueryBuilder; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/SearchTemplateRequestTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestTests.java similarity index 
98% rename from modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/SearchTemplateRequestTests.java rename to modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestTests.java index bbac6dabcee..b5831a3c5a8 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/SearchTemplateRequestTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestTests.java @@ -17,12 +17,11 @@ * under the License. */ -package org.elasticsearch.action.search.template; +package org.elasticsearch.script.mustache; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.rest.action.search.template.RestSearchTemplateAction; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; diff --git a/modules/lang-mustache/src/test/resources/org/elasticsearch/action/search/template/simple-msearch-template.json b/modules/lang-mustache/src/test/resources/org/elasticsearch/script/mustache/simple-msearch-template.json similarity index 100% rename from modules/lang-mustache/src/test/resources/org/elasticsearch/action/search/template/simple-msearch-template.json rename to modules/lang-mustache/src/test/resources/org/elasticsearch/script/mustache/simple-msearch-template.json From 124a9fabe3c2ff0d11ec314b18b2e7bac0639f3b Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Thu, 22 Oct 2015 22:21:18 -0600 Subject: [PATCH 55/93] Circuit break on aggregation bucket numbers with request breaker This adds new circuit breaking with the "request" breaker, which adds circuit breaks based on the number of buckets created during aggregations. It consists of incrementing during AggregatorBase creation This also bumps the REQUEST breaker to 60% of the JVM heap now. 
The output when circuit breaking an aggregation looks like: ```json { "shard" : 0, "index" : "i", "node" : "a5AvjUn_TKeTNYl0FyBW2g", "reason" : { "type" : "exception", "reason" : "java.util.concurrent.ExecutionException: QueryPhaseExecutionException[Query Failed [Failed to execute main query]]; nested: CircuitBreakingException[[request] Data too large, data for [] would be larger than limit of [104857600/100mb]];", "caused_by" : { "type" : "execution_exception", "reason" : "QueryPhaseExecutionException[Query Failed [Failed to execute main query]]; nested: CircuitBreakingException[[request] Data too large, data for [] would be larger than limit of [104857600/100mb]];", "caused_by" : { "type" : "circuit_breaking_exception", "reason" : "[request] Data too large, data for [] would be larger than limit of [104857600/100mb]", "bytes_wanted" : 104860781, "bytes_limit" : 104857600 } } } } ``` Relates to #14046 --- .../elasticsearch/common/util/BigArrays.java | 4 ++ .../HierarchyCircuitBreakerService.java | 14 +++--- .../search/aggregations/AggregatorBase.java | 25 ++++++++++- .../breaker/CircuitBreakerServiceIT.java | 44 ++++++++++++++++++- .../modules/indices/circuit_breaker.asciidoc | 3 +- 5 files changed, 80 insertions(+), 10 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/util/BigArrays.java b/core/src/main/java/org/elasticsearch/common/util/BigArrays.java index da49aac5ed0..6a15a3d9000 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BigArrays.java +++ b/core/src/main/java/org/elasticsearch/common/util/BigArrays.java @@ -429,6 +429,10 @@ public class BigArrays implements Releasable { return this.circuitBreakingInstance; } + public CircuitBreakerService breakerService() { + return this.circuitBreakingInstance.breakerService; + } + private T resizeInPlace(T array, long newSize) { final long oldMemSize = array.ramBytesUsed(); array.resize(newSize); diff --git 
a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java index 328b9564a5d..65571482093 100644 --- a/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +++ b/core/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -57,7 +57,7 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { new Setting<>("indices.breaker.fielddata.type", "memory", CircuitBreaker.Type::parseValue, Property.NodeScope); public static final Setting REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING = - Setting.byteSizeSetting("indices.breaker.request.limit", "40%", Property.Dynamic, Property.NodeScope); + Setting.byteSizeSetting("indices.breaker.request.limit", "60%", Property.Dynamic, Property.NodeScope); public static final Setting REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING = Setting.doubleSetting("indices.breaker.request.overhead", 1.0d, 0.0d, Property.Dynamic, Property.NodeScope); public static final Setting REQUEST_CIRCUIT_BREAKER_TYPE_SETTING = @@ -98,7 +98,10 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { REQUEST_CIRCUIT_BREAKER_TYPE_SETTING.get(settings) ); - this.parentSettings = new BreakerSettings(CircuitBreaker.PARENT, TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).bytes(), 1.0, CircuitBreaker.Type.PARENT); + this.parentSettings = new BreakerSettings(CircuitBreaker.PARENT, + TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).bytes(), 1.0, + CircuitBreaker.Type.PARENT); + if (logger.isTraceEnabled()) { logger.trace("parent circuit breaker with settings {}", this.parentSettings); } @@ -137,7 +140,6 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { registerBreaker(newFielddataSettings); HierarchyCircuitBreakerService.this.fielddataSettings = newFielddataSettings; logger.info("Updated breaker settings field data: {}", 
newFielddataSettings); - } private boolean validateTotalCircuitBreakerLimit(ByteSizeValue byteSizeValue) { @@ -184,7 +186,7 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { } // Manually add the parent breaker settings since they aren't part of the breaker map allStats.add(new CircuitBreakerStats(CircuitBreaker.PARENT, parentSettings.getLimit(), - parentEstimated, 1.0, parentTripCount.get())); + parentEstimated, 1.0, parentTripCount.get())); return new AllCircuitBreakerStats(allStats.toArray(new CircuitBreakerStats[allStats.size()])); } @@ -207,8 +209,8 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { if (totalUsed > parentLimit) { this.parentTripCount.incrementAndGet(); throw new CircuitBreakingException("[parent] Data too large, data for [" + - label + "] would be larger than limit of [" + - parentLimit + "/" + new ByteSizeValue(parentLimit) + "]", + label + "] would be larger than limit of [" + + parentLimit + "/" + new ByteSizeValue(parentLimit) + "]", totalUsed, parentLimit); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java index 04023b04977..c99da85f331 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java @@ -18,7 +18,10 @@ */ package org.elasticsearch.search.aggregations; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.aggregations.bucket.BestBucketsDeferringCollector; import org.elasticsearch.search.aggregations.bucket.DeferringBucketCollector; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -37,6 +40,9 @@ 
import java.util.Map; */ public abstract class AggregatorBase extends Aggregator { + /** The default "weight" that a bucket takes when performing an aggregation */ + public static final int DEFAULT_WEIGHT = 1024 * 5; // 5kb + protected final String name; protected final Aggregator parent; protected final AggregationContext context; @@ -48,6 +54,8 @@ public abstract class AggregatorBase extends Aggregator { private Map subAggregatorbyName; private DeferringBucketCollector recordingWrapper; private final List pipelineAggregators; + private final CircuitBreakerService breakerService; + private boolean failed = false; /** * Constructs a new Aggregator. @@ -65,6 +73,7 @@ public abstract class AggregatorBase extends Aggregator { this.metaData = metaData; this.parent = parent; this.context = context; + this.breakerService = context.bigArrays().breakerService(); assert factories != null : "sub-factories provided to BucketAggregator must not be null, use AggragatorFactories.EMPTY instead"; this.subAggregators = factories.createSubAggregators(this); context.searchContext().addReleasable(this, Lifetime.PHASE); @@ -96,6 +105,14 @@ public abstract class AggregatorBase extends Aggregator { return false; // unreachable } }; + try { + this.breakerService + .getBreaker(CircuitBreaker.REQUEST) + .addEstimateBytesAndMaybeBreak(DEFAULT_WEIGHT, ""); + } catch (CircuitBreakingException cbe) { + this.failed = true; + throw cbe; + } } /** @@ -245,7 +262,13 @@ public abstract class AggregatorBase extends Aggregator { /** Called upon release of the aggregator. */ @Override public void close() { - doClose(); + try { + doClose(); + } finally { + if (!this.failed) { + this.breakerService.getBreaker(CircuitBreaker.REQUEST).addWithoutBreaking(-DEFAULT_WEIGHT); + } + } } /** Release instance-specific data. 
*/ diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java index b448f35c21b..361bca6076a 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java @@ -48,6 +48,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.junit.After; import org.junit.Before; @@ -59,6 +60,7 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.cardinality; +import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; @@ -316,9 +318,49 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { client.prepareSearch("cb-test").setQuery(matchAllQuery()).addAggregation(cardinality("card").field("test")).get(); fail("aggregation should have tripped the breaker"); } catch (Exception e) { - String errMsg = "CircuitBreakingException[[request] Data too large, data for [] would be larger than limit of [10/10b]]"; + String errMsg = "CircuitBreakingException[[request] Data too large"; assertThat("Exception: [" + e.toString() + "] should contain a CircuitBreakingException", e.toString(), containsString(errMsg)); + errMsg = "would be larger 
than limit of [10/10b]]"; + assertThat("Exception: [" + e.toString() + "] should contain a CircuitBreakingException", e.toString(), containsString(errMsg)); + } + } + + public void testBucketBreaker() throws Exception { + if (noopBreakerUsed()) { + logger.info("--> noop breakers used, skipping test"); + return; + } + assertAcked(prepareCreate("cb-test", 1, Settings.builder().put(SETTING_NUMBER_OF_REPLICAS, between(0, 1)))); + Client client = client(); + + // Make request breaker limited to a small amount + Settings resetSettings = Settings.builder() + .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "100b") + .build(); + assertAcked(client.admin().cluster().prepareUpdateSettings().setTransientSettings(resetSettings)); + + // index some different terms so we have some field data for loading + int docCount = scaledRandomIntBetween(100, 1000); + List reqs = new ArrayList<>(); + for (long id = 0; id < docCount; id++) { + reqs.add(client.prepareIndex("cb-test", "type", Long.toString(id)).setSource("test", id)); + } + indexRandom(true, reqs); + + // A terms aggregation on the "test" field should trip the bucket circuit breaker + try { + SearchResponse resp = client.prepareSearch("cb-test") + .setQuery(matchAllQuery()) + .addAggregation(terms("my_terms").field("test")) + .get(); + assertTrue("there should be shard failures", resp.getFailedShards() > 0); + fail("aggregation should have tripped the breaker"); + } catch (Exception e) { + String errMsg = "CircuitBreakingException[[request] " + + "Data too large, data for [] would be larger than limit of [100/100b]]"; + assertThat("Exception: " + e.toString() + " should contain a CircuitBreakingException", + e.toString(), containsString(errMsg)); } } diff --git a/docs/reference/modules/indices/circuit_breaker.asciidoc b/docs/reference/modules/indices/circuit_breaker.asciidoc index 762833d527b..fed1c350274 100644 --- a/docs/reference/modules/indices/circuit_breaker.asciidoc +++ 
b/docs/reference/modules/indices/circuit_breaker.asciidoc @@ -47,7 +47,7 @@ request) from exceeding a certain amount of memory. `indices.breaker.request.limit`:: - Limit for request breaker, defaults to 40% of JVM heap + Limit for request breaker, defaults to 60% of JVM heap `indices.breaker.request.overhead`:: @@ -74,4 +74,3 @@ memory on a node. The memory usage is based on the content length of the request [[http-circuit-breaker]] [float] - From 2f831c3abbf43ba1adc5407fc515b6f74cd6a6cf Mon Sep 17 00:00:00 2001 From: Ali Beyad Date: Mon, 25 Jul 2016 15:22:08 -0400 Subject: [PATCH 56/93] BytesArray tests fix: offsets don't matter on a zero bytes array Closes #19582 --- .../common/bytes/AbstractBytesReferenceTestCase.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/framework/src/main/java/org/elasticsearch/common/bytes/AbstractBytesReferenceTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/bytes/AbstractBytesReferenceTestCase.java index 103e263ffa6..aae6522da12 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/bytes/AbstractBytesReferenceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/common/bytes/AbstractBytesReferenceTestCase.java @@ -430,7 +430,7 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { int length = randomInt(PAGE_SIZE * randomIntBetween(2, 5)); BytesReference pbr = newBytesReference(length); int sliceOffset = randomIntBetween(0, pbr.length() - 1); // an offset to the end would be len 0 - int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset); + int sliceLength = randomIntBetween(1, pbr.length() - sliceOffset); BytesReference slice = pbr.slice(sliceOffset, sliceLength); BytesRef singlePageOrNull = getSinglePageOrNull(slice); if (singlePageOrNull != null) { From b90dff7292b755ca0db0aac6e9a154a527662387 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Mon, 25 Jul 2016 22:00:49 +0200 Subject: [PATCH 57/93] increase log level to debug in 
testConcurrentSendRespondAndDisconnect --- .../transport/AbstractSimpleTransportTestCase.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 87087e772ab..58cc45a75a1 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -488,6 +488,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { assertThat(latch.await(5, TimeUnit.SECONDS), equalTo(true)); } + @TestLogging("transport:DEBUG") public void testConcurrentSendRespondAndDisconnect() throws BrokenBarrierException, InterruptedException { Set sendingErrors = ConcurrentCollections.newConcurrentSet(); Set responseErrors = ConcurrentCollections.newConcurrentSet(); @@ -561,14 +562,15 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { for (int iter = 0; iter < 10; iter++) { PlainActionFuture listener = new PlainActionFuture<>(); final String info = sender + "_" + iter; - serviceA.sendRequest(nodeB, "test", new TestRequest(info), + final DiscoveryNode node = nodeB; // capture now + serviceA.sendRequest(node, "test", new TestRequest(info), new ActionListenerResponseHandler<>(listener, TestResponse::new)); try { listener.actionGet(); } catch (ConnectTransportException e) { // ok! 
} catch (Exception e) { - logger.error("caught exception while sending to node {}", e, nodeB); + logger.error("caught exception while sending to node {}", e, node); sendingErrors.add(e); } } From a95d4f4ee7e7ada4b1ddec96b8deef140f96c1a7 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 19 Jul 2016 18:31:19 -0400 Subject: [PATCH 58/93] Add Location header and improve REST testing This adds a header that looks like `Location: /test/test/1` to the response for the index/create/update API. The requirement for the header comes from https://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html https://tools.ietf.org/html/rfc7231#section-7.1.2 claims that relative URIs are OK. So we use an absolute path which should resolve to the appropriate location. Closes #19079 This makes large changes to our rest test infrastructure, allowing us to write junit tests that test a running cluster via the rest client. It does this by splitting ESRestTestCase into two classes: * ESRestTestCase is the superclass of all tests that use the rest client to interact with a running cluster. * ESClientYamlSuiteTestCase is the superclass of all tests that use the rest client to run the yaml tests. These tests are shared across all official clients, thus the `ClientYamlSuite` part of the name. 
--- .../resources/checkstyle_suppressions.xml | 1 - .../action/DocWriteResponse.java | 25 + .../common/xcontent/StatusToXContent.java | 3 +- .../rest/action/index/RestIndexAction.java | 5 +- .../support/RestStatusToXContentListener.java | 28 +- .../rest/action/update/RestUpdateAction.java | 2 +- .../action/DocWriteResponseTests.java | 33 ++ .../org/elasticsearch/test/rest/RestIT.java | 2 +- .../test/rest/CreatedLocationHeaderIT.java | 65 +++ .../org/elasticsearch/test/rest/RestIT.java | 2 +- .../org/elasticsearch/test/rest/RestIT.java | 2 +- .../org/elasticsearch/test/rest/RestIT.java | 2 +- .../org/elasticsearch/test/rest/RestIT.java | 2 +- .../smoketest/SmokeTestDocsIT.java | 6 +- .../matrix/MatrixAggregationRestIT.java | 6 +- .../ingest/common/IngestCommonRestIT.java | 6 +- .../script/expression/ExpressionRestIT.java | 6 +- .../script/groovy/GroovyRestIT.java | 6 +- .../script/mustache/MustacheRestIT.java | 6 +- .../painless/PainlessRestIT.java | 6 +- .../percolator/PercolatorRestIT.java | 6 +- .../index/reindex/ReindexRestIT.java | 6 +- .../http/netty3/Netty3RestIT.java | 7 +- .../http/netty4/Netty4RestIT.java | 7 +- .../index/analysis/AnalysisICURestIT.java | 6 +- .../analysis/AnalysisKuromojiRestIT.java | 6 +- .../analysis/AnalysisPhoneticRestIT.java | 6 +- .../analysis/AnalysisSmartChineseRestIT.java | 6 +- .../index/analysis/AnalysisPolishRestIT.java | 6 +- .../azure/classic/AzureDiscoveryRestIT.java | 6 +- .../cloud/aws/DiscoveryEc2RestIT.java | 6 +- .../discovery/gce/DiscoveryGCERestIT.java | 6 +- .../attachment/IngestAttachmentRestIT.java | 6 +- .../ingest/geoip/IngestGeoIpRestIT.java | 6 +- .../ingest/useragent/UserAgentRestIT.java | 6 +- .../plugin/example/JvmExampleRestIT.java | 6 +- .../javascript/LangJavaScriptRestIT.java | 6 +- .../script/python/LangPythonScriptRestIT.java | 6 +- .../attachments/MapperAttachmentsRestIT.java | 4 +- .../mapper/murmur3/MapperMurmur3RestIT.java | 4 +- .../index/mapper/size/MapperSizeRestIT.java | 4 +- 
.../azure/AzureRepositoryRestIT.java | 6 +- .../GoogleCloudStorageRepositoryRestIT.java | 4 +- .../hdfs/HdfsRepositoryRestIT.java | 6 +- .../repositories/s3/RepositoryS3RestIT.java | 6 +- .../index/store/SMBStoreRestIT.java | 6 +- .../backwards/MultiNodeBackwardsIT.java | 4 +- .../smoketest/IngestDisabledIT.java | 6 +- .../smoketest/IngestWithDependenciesIT.java | 6 +- .../smoketest/SmokeTestMultiIT.java | 6 +- .../smoketest/SmokeTestPluginsIT.java | 6 +- .../SmokeTestReindexWithPainlessIT.java | 6 +- .../junit/listeners/ReproduceInfoPrinter.java | 16 +- .../test/rest/ESClientYamlSuiteTestCase.java | 319 +++++++++++++ .../test/rest/ESRestTestCase.java | 439 ++++++------------ .../test/rest/RestTestExecutionContext.java | 22 +- .../test/rest/client/RestTestClient.java | 93 +--- .../test/rest/support/Features.java | 10 +- 58 files changed, 762 insertions(+), 535 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java create mode 100644 distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java create mode 100644 test/framework/src/main/java/org/elasticsearch/test/rest/ESClientYamlSuiteTestCase.java diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 0203ab9de76..339abc8c02d 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -535,7 +535,6 @@ - diff --git a/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java b/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java index 0925c744144..49ac5d4f8a4 100644 --- a/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java +++ b/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.WriteResponse; import 
org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.action.support.replication.ReplicationResponse; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.StatusToXContent; @@ -109,6 +110,30 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr return getShardInfo().status(); } + /** + * Gets the location of the written document as a string suitable for a {@code Location} header. + * @param routing any routing used in the request. If null the location doesn't include routing information. + */ + public String getLocation(@Nullable String routing) { + // Absolute path for the location of the document. This should be allowed as of HTTP/1.1: + // https://tools.ietf.org/html/rfc7231#section-7.1.2 + String index = getIndex(); + String type = getType(); + String id = getId(); + String routingStart = "?routing="; + int bufferSize = 3 + index.length() + type.length() + id.length(); + if (routing != null) { + bufferSize += routingStart.length() + routing.length(); + } + StringBuilder location = new StringBuilder(bufferSize); + location.append('/').append(index); + location.append('/').append(type); + location.append('/').append(id); + if (routing != null) { + location.append(routingStart).append(routing); + } + return location.toString(); + } @Override public void readFrom(StreamInput in) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/StatusToXContent.java b/core/src/main/java/org/elasticsearch/common/xcontent/StatusToXContent.java index d181ffc21d1..f22aa39613f 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/StatusToXContent.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/StatusToXContent.java @@ -21,7 +21,8 @@ package org.elasticsearch.common.xcontent; import org.elasticsearch.rest.RestStatus; /** - 
* + * Objects that can both render themselves in as json/yaml/etc and can provide a {@link RestStatus} for their response. Usually should be + * implemented by top level responses sent back to users from REST endpoints. */ public interface StatusToXContent extends ToXContent { diff --git a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java index c3c4549aa63..a4222adacd7 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java @@ -90,7 +90,8 @@ public class RestIndexAction extends BaseRestHandler { } catch (IllegalArgumentException eia){ try { XContentBuilder builder = channel.newErrorBuilder(); - channel.sendResponse(new BytesRestResponse(BAD_REQUEST, builder.startObject().field("error", eia.getMessage()).endObject())); + channel.sendResponse( + new BytesRestResponse(BAD_REQUEST, builder.startObject().field("error", eia.getMessage()).endObject())); } catch (IOException e1) { logger.warn("Failed to send response", e1); return; @@ -101,6 +102,6 @@ public class RestIndexAction extends BaseRestHandler { if (consistencyLevel != null) { indexRequest.consistencyLevel(WriteConsistencyLevel.fromString(consistencyLevel)); } - client.index(indexRequest, new RestStatusToXContentListener<>(channel)); + client.index(indexRequest, new RestStatusToXContentListener<>(channel, r -> r.getLocation(indexRequest.routing()))); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestStatusToXContentListener.java b/core/src/main/java/org/elasticsearch/rest/action/support/RestStatusToXContentListener.java index 6bd9a900c8e..3e4677ab446 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/RestStatusToXContentListener.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestStatusToXContentListener.java @@ -23,14 +23,30 @@ 
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; + +import java.util.function.Function; /** - * + * Content listener that extracts that {@link RestStatus} from the response. */ public class RestStatusToXContentListener extends RestResponseListener { + private final Function extractLocation; + /** + * Build an instance that doesn't support responses with the status {@code 201 CREATED}. + */ public RestStatusToXContentListener(RestChannel channel) { + // TODO switch this to throwing an exception? + this(channel, r -> null); + } + + /** + * Build an instance that does support responses with the status {@code 201 CREATED}. + */ + public RestStatusToXContentListener(RestChannel channel, Function extractLocation) { super(channel); + this.extractLocation = extractLocation; } @Override @@ -42,7 +58,13 @@ public class RestStatusToXContentListener ext builder.startObject(); response.toXContent(builder, channel.request()); builder.endObject(); - return new BytesRestResponse(response.status(), builder); + BytesRestResponse restResponse = new BytesRestResponse(response.status(), builder); + if (RestStatus.CREATED == restResponse.status()) { + String location = extractLocation.apply(response); + if (location != null) { + restResponse.addHeader("Location", location); + } + } + return restResponse; } - } diff --git a/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java b/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java index ddd52001e03..cfe26d35c56 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java @@ -97,6 +97,6 @@ public class RestUpdateAction extends BaseRestHandler { } } - client.update(updateRequest, new 
RestStatusToXContentListener<>(channel)); + client.update(updateRequest, new RestStatusToXContentListener<>(channel, r -> r.getLocation(updateRequest.routing()))); } } diff --git a/core/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java b/core/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java new file mode 100644 index 00000000000..e652a670a67 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java @@ -0,0 +1,33 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action; + +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.test.ESTestCase; + +public class DocWriteResponseTests extends ESTestCase { + public void testGetLocation() { + DocWriteResponse response = new DocWriteResponse(new ShardId("index", "uuid", 0), "type", "id", 0) { + // DocWriteResponse is abstract so we have to sneak a subclass in here to test it. 
+ }; + assertEquals("/index/type/id", response.getLocation(null)); + assertEquals("/index/type/id?routing=test_routing", response.getLocation("test_routing")); + } +} diff --git a/distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java index 2d90db2c5b7..94629641849 100644 --- a/distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ b/distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java @@ -25,7 +25,7 @@ import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; /** Rest integration test. Runs against a cluster started by {@code gradle integTest} */ -public class RestIT extends ESRestTestCase { +public class RestIT extends ESClientYamlSuiteTestCase { public RestIT(RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java b/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java new file mode 100644 index 00000000000..71ec1948662 --- /dev/null +++ b/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java @@ -0,0 +1,65 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.test.rest; + +import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Response; + +import java.io.IOException; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.startsWith; + +/** + * Tests for the "Location" header returned when returning {@code 201 CREATED}. + */ +public class CreatedLocationHeaderIT extends ESRestTestCase { + public void testCreate() throws IOException { + locationTestCase("PUT", "test/test/1"); + } + + public void testIndexWithId() throws IOException { + locationTestCase("PUT", "test/test/1"); + } + + public void testIndexWithoutId() throws IOException { + locationTestCase("POST", "test/test"); + } + + public void testUpsert() throws IOException { + locationTestCase(client().performRequest("POST", "test/test/1/_update", emptyMap(), new StringEntity("{" + + "\"doc\": {\"test\": \"test\"}," + + "\"doc_as_upsert\": true}"))); + } + + private void locationTestCase(String method, String url) throws IOException { + locationTestCase(client().performRequest(method, url, emptyMap(), new StringEntity("{\"test\": \"test\"}"))); + locationTestCase(client().performRequest(method, url + "?routing=cat", emptyMap(), new StringEntity("{\"test\": \"test\"}"))); + } + + private void locationTestCase(Response response) throws IOException { + assertEquals(201, response.getStatusLine().getStatusCode()); + String location = response.getHeader("Location"); + assertThat(location, startsWith("/test/test/")); + Response getResponse = client().performRequest("GET", location); + assertEquals(singletonMap("test", "test"), entityAsMap(getResponse).get("_source")); + } +} diff --git a/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java 
b/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java index 2d90db2c5b7..94629641849 100644 --- a/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ b/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java @@ -25,7 +25,7 @@ import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; /** Rest integration test. Runs against a cluster started by {@code gradle integTest} */ -public class RestIT extends ESRestTestCase { +public class RestIT extends ESClientYamlSuiteTestCase { public RestIT(RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java index 2d90db2c5b7..94629641849 100644 --- a/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ b/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java @@ -25,7 +25,7 @@ import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; /** Rest integration test. Runs against a cluster started by {@code gradle integTest} */ -public class RestIT extends ESRestTestCase { +public class RestIT extends ESClientYamlSuiteTestCase { public RestIT(RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java index 2d90db2c5b7..94629641849 100644 --- a/distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ b/distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java @@ -25,7 +25,7 @@ import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; /** Rest integration test. 
Runs against a cluster started by {@code gradle integTest} */ -public class RestIT extends ESRestTestCase { +public class RestIT extends ESClientYamlSuiteTestCase { public RestIT(RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java index 2d90db2c5b7..94629641849 100644 --- a/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ b/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java @@ -25,7 +25,7 @@ import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; /** Rest integration test. Runs against a cluster started by {@code gradle integTest} */ -public class RestIT extends ESRestTestCase { +public class RestIT extends ESClientYamlSuiteTestCase { public RestIT(RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/docs/src/test/java/org/elasticsearch/smoketest/SmokeTestDocsIT.java b/docs/src/test/java/org/elasticsearch/smoketest/SmokeTestDocsIT.java index 452bea4a647..d8eccda61a2 100644 --- a/docs/src/test/java/org/elasticsearch/smoketest/SmokeTestDocsIT.java +++ b/docs/src/test/java/org/elasticsearch/smoketest/SmokeTestDocsIT.java @@ -22,14 +22,14 @@ package org.elasticsearch.smoketest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; import java.util.List; -public class SmokeTestDocsIT extends ESRestTestCase { +public class SmokeTestDocsIT extends ESClientYamlSuiteTestCase { public SmokeTestDocsIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ 
-37,7 +37,7 @@ public class SmokeTestDocsIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } @Override diff --git a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/MatrixAggregationRestIT.java b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/MatrixAggregationRestIT.java index 69ddd46edc2..47609494a61 100644 --- a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/MatrixAggregationRestIT.java +++ b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/MatrixAggregationRestIT.java @@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations.matrix; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; @@ -29,13 +29,13 @@ import java.io.IOException; /** * */ -public class MatrixAggregationRestIT extends ESRestTestCase { +public class MatrixAggregationRestIT extends ESClientYamlSuiteTestCase { public MatrixAggregationRestIT(@Name("yaml")RestTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestCommonRestIT.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestCommonRestIT.java index 94418ac552c..65ee917f30e 100644 --- 
a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestCommonRestIT.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestCommonRestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.ingest.common; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class IngestCommonRestIT extends ESRestTestCase { +public class IngestCommonRestIT extends ESClientYamlSuiteTestCase { public IngestCommonRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class IngestCommonRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java index 3da22bf8a43..8ff5168a7cf 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.script.expression; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import 
org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class ExpressionRestIT extends ESRestTestCase { +public class ExpressionRestIT extends ESClientYamlSuiteTestCase { public ExpressionRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class ExpressionRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java index b73ec250daf..b681e5f6857 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.script.groovy; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class GroovyRestIT extends ESRestTestCase { +public class GroovyRestIT extends ESClientYamlSuiteTestCase { public GroovyRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class GroovyRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git 
a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java index 727d0c4316d..95f86dec3c7 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.script.mustache; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class MustacheRestIT extends ESRestTestCase { +public class MustacheRestIT extends ESClientYamlSuiteTestCase { public MustacheRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class MustacheRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessRestIT.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessRestIT.java index dbb596c275c..353aa13f839 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessRestIT.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessRestIT.java @@ -21,14 +21,14 @@ package org.elasticsearch.painless; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import 
org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; /** Runs yaml rest tests */ -public class PainlessRestIT extends ESRestTestCase { +public class PainlessRestIT extends ESClientYamlSuiteTestCase { public PainlessRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -36,7 +36,7 @@ public class PainlessRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorRestIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorRestIT.java index 9f4eff64995..f4ec1b5215e 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorRestIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorRestIT.java @@ -21,19 +21,19 @@ package org.elasticsearch.percolator; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class PercolatorRestIT extends ESRestTestCase { +public class PercolatorRestIT extends ESClientYamlSuiteTestCase { public PercolatorRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); 
+ return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRestIT.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRestIT.java index d8718c5b493..c67908f631d 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRestIT.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRestIT.java @@ -22,19 +22,19 @@ package org.elasticsearch.index.reindex; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class ReindexRestIT extends ESRestTestCase { +public class ReindexRestIT extends ESClientYamlSuiteTestCase { public ReindexRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3RestIT.java b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3RestIT.java index a276922354a..9aa57bc312f 100644 --- a/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3RestIT.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3RestIT.java @@ -21,13 +21,14 @@ package org.elasticsearch.http.netty3; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; + +import 
org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class Netty3RestIT extends ESRestTestCase { +public class Netty3RestIT extends ESClientYamlSuiteTestCase { public Netty3RestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,6 +36,6 @@ public class Netty3RestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4RestIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4RestIT.java index 6566428efa3..cbd8f7cd1e6 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4RestIT.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4RestIT.java @@ -21,13 +21,14 @@ package org.elasticsearch.http.netty4; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; + +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class Netty4RestIT extends ESRestTestCase { +public class Netty4RestIT extends ESClientYamlSuiteTestCase { public Netty4RestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +36,7 @@ public class Netty4RestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return 
ESClientYamlSuiteTestCase.createParameters(0, 1); } } \ No newline at end of file diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisICURestIT.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisICURestIT.java index 8da56d5a72b..17e5e1c3ca7 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisICURestIT.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisICURestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class AnalysisICURestIT extends ESRestTestCase { +public class AnalysisICURestIT extends ESClientYamlSuiteTestCase { public AnalysisICURestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class AnalysisICURestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/AnalysisKuromojiRestIT.java b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/AnalysisKuromojiRestIT.java index ae51e491d6b..5697fb55bde 100644 --- a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/AnalysisKuromojiRestIT.java +++ b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/AnalysisKuromojiRestIT.java @@ -21,13 +21,13 @@ package 
org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class AnalysisKuromojiRestIT extends ESRestTestCase { +public class AnalysisKuromojiRestIT extends ESClientYamlSuiteTestCase { public AnalysisKuromojiRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class AnalysisKuromojiRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticRestIT.java b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticRestIT.java index 9d66bf24357..48f1c4b1716 100644 --- a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticRestIT.java +++ b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticRestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class AnalysisPhoneticRestIT extends ESRestTestCase { +public class AnalysisPhoneticRestIT extends 
ESClientYamlSuiteTestCase { public AnalysisPhoneticRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class AnalysisPhoneticRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/AnalysisSmartChineseRestIT.java b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/AnalysisSmartChineseRestIT.java index 16113b2b7ac..58c86acea84 100644 --- a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/AnalysisSmartChineseRestIT.java +++ b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/AnalysisSmartChineseRestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class AnalysisSmartChineseRestIT extends ESRestTestCase { +public class AnalysisSmartChineseRestIT extends ESClientYamlSuiteTestCase { public AnalysisSmartChineseRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class AnalysisSmartChineseRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git 
a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/AnalysisPolishRestIT.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/AnalysisPolishRestIT.java index 330ad87af74..54da5de8e4a 100644 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/AnalysisPolishRestIT.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/AnalysisPolishRestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class AnalysisPolishRestIT extends ESRestTestCase { +public class AnalysisPolishRestIT extends ESClientYamlSuiteTestCase { public AnalysisPolishRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class AnalysisPolishRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryRestIT.java b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryRestIT.java index cb04842cb4a..5506169f79d 100644 --- a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryRestIT.java +++ b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryRestIT.java @@ -21,13 +21,13 @@ package 
org.elasticsearch.discovery.azure.classic; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class AzureDiscoveryRestIT extends ESRestTestCase { +public class AzureDiscoveryRestIT extends ESClientYamlSuiteTestCase { public AzureDiscoveryRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class AzureDiscoveryRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/DiscoveryEc2RestIT.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/DiscoveryEc2RestIT.java index 24ccf82a3d8..cc621dd5949 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/DiscoveryEc2RestIT.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/DiscoveryEc2RestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.cloud.aws; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class DiscoveryEc2RestIT extends ESRestTestCase { +public class DiscoveryEc2RestIT extends ESClientYamlSuiteTestCase { public DiscoveryEc2RestIT(@Name("yaml") 
RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class DiscoveryEc2RestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGCERestIT.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGCERestIT.java index 891dd156aac..dae943f7bd2 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGCERestIT.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGCERestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.discovery.gce; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class DiscoveryGCERestIT extends ESRestTestCase { +public class DiscoveryGCERestIT extends ESClientYamlSuiteTestCase { public DiscoveryGCERestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class DiscoveryGCERestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/IngestAttachmentRestIT.java b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/IngestAttachmentRestIT.java index 2399f854c91..671b5b0ab95 
100644 --- a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/IngestAttachmentRestIT.java +++ b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/IngestAttachmentRestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.ingest.attachment; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class IngestAttachmentRestIT extends ESRestTestCase { +public class IngestAttachmentRestIT extends ESClientYamlSuiteTestCase { public IngestAttachmentRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class IngestAttachmentRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpRestIT.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpRestIT.java index 0e4d1ee4b2b..19cd7226d43 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpRestIT.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpRestIT.java @@ -22,14 +22,14 @@ package org.elasticsearch.ingest.geoip; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import 
org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; import java.util.Collection; -public class IngestGeoIpRestIT extends ESRestTestCase { +public class IngestGeoIpRestIT extends ESClientYamlSuiteTestCase { public IngestGeoIpRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -37,7 +37,7 @@ public class IngestGeoIpRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentRestIT.java b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentRestIT.java index 010f85f671a..0ca96698e3b 100644 --- a/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentRestIT.java +++ b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentRestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.ingest.useragent; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class UserAgentRestIT extends ESRestTestCase { +public class UserAgentRestIT extends ESClientYamlSuiteTestCase { public UserAgentRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,6 +35,6 @@ public class UserAgentRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return 
ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleRestIT.java b/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleRestIT.java index 74573a79289..62be8332439 100644 --- a/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleRestIT.java +++ b/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleRestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.plugin.example; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class JvmExampleRestIT extends ESRestTestCase { +public class JvmExampleRestIT extends ESClientYamlSuiteTestCase { public JvmExampleRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class JvmExampleRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/LangJavaScriptRestIT.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/LangJavaScriptRestIT.java index 8039715c3d1..014c7073e71 100644 --- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/LangJavaScriptRestIT.java +++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/LangJavaScriptRestIT.java @@ -21,13 +21,13 @@ package 
org.elasticsearch.script.javascript; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class LangJavaScriptRestIT extends ESRestTestCase { +public class LangJavaScriptRestIT extends ESClientYamlSuiteTestCase { public LangJavaScriptRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class LangJavaScriptRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/LangPythonScriptRestIT.java b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/LangPythonScriptRestIT.java index ee0a707644f..cbf7547a580 100644 --- a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/LangPythonScriptRestIT.java +++ b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/LangPythonScriptRestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.script.python; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class LangPythonScriptRestIT extends ESRestTestCase { +public class LangPythonScriptRestIT extends ESClientYamlSuiteTestCase { public 
LangPythonScriptRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class LangPythonScriptRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java index 1eccb1a1445..0958225ae18 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.mapper.attachments; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class MapperAttachmentsRestIT extends ESRestTestCase { +public class MapperAttachmentsRestIT extends ESClientYamlSuiteTestCase { public MapperAttachmentsRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java index bbe342c716c..37a4490d38c 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java +++ 
b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.index.mapper.murmur3; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class MapperMurmur3RestIT extends ESRestTestCase { +public class MapperMurmur3RestIT extends ESClientYamlSuiteTestCase { public MapperMurmur3RestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java index 84df085f221..f5ee35c5638 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.index.mapper.size; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class MapperSizeRestIT extends ESRestTestCase { +public class MapperSizeRestIT extends ESClientYamlSuiteTestCase { public MapperSizeRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); diff --git 
a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryRestIT.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryRestIT.java index ad58838bf5b..3c565d407ce 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryRestIT.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryRestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.repositories.azure; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class AzureRepositoryRestIT extends ESRestTestCase { +public class AzureRepositoryRestIT extends ESClientYamlSuiteTestCase { public AzureRepositoryRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class AzureRepositoryRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepositoryRestIT.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepositoryRestIT.java index 18862d05aa0..b19b8623ace 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepositoryRestIT.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepositoryRestIT.java @@ -21,13 +21,13 @@ package 
org.elasticsearch.repositories.gcs; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class GoogleCloudStorageRepositoryRestIT extends ESRestTestCase { +public class GoogleCloudStorageRepositoryRestIT extends ESClientYamlSuiteTestCase { public GoogleCloudStorageRepositoryRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java index dea6e8b749f..147838df9ea 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java @@ -22,11 +22,11 @@ import java.io.IOException; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; -public class HdfsRepositoryRestIT extends ESRestTestCase { +public class HdfsRepositoryRestIT extends ESClientYamlSuiteTestCase { public HdfsRepositoryRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -34,6 +34,6 @@ public class HdfsRepositoryRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - 
return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java index d8e436b50bb..5dada2a2637 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.repositories.s3; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class RepositoryS3RestIT extends ESRestTestCase { +public class RepositoryS3RestIT extends ESClientYamlSuiteTestCase { public RepositoryS3RestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class RepositoryS3RestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SMBStoreRestIT.java b/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SMBStoreRestIT.java index af1b0372995..99617b94644 100644 --- a/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SMBStoreRestIT.java +++ b/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SMBStoreRestIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.index.store; import 
com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class SMBStoreRestIT extends ESRestTestCase { +public class SMBStoreRestIT extends ESClientYamlSuiteTestCase { public SMBStoreRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class SMBStoreRestIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/MultiNodeBackwardsIT.java b/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/MultiNodeBackwardsIT.java index 1f3ad15d1bf..e23ca840025 100644 --- a/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/MultiNodeBackwardsIT.java +++ b/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/MultiNodeBackwardsIT.java @@ -22,14 +22,14 @@ package org.elasticsearch.backwards; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; import org.apache.lucene.util.TimeUnits; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; @TimeoutSuite(millis = 40 * TimeUnits.MINUTE) // some of the windows test VMs are slow as hell -public class MultiNodeBackwardsIT extends ESRestTestCase { +public class MultiNodeBackwardsIT extends 
ESClientYamlSuiteTestCase { public MultiNodeBackwardsIT(RestTestCandidate testCandidate) { super(testCandidate); diff --git a/qa/smoke-test-ingest-disabled/src/test/java/org/elasticsearch/smoketest/IngestDisabledIT.java b/qa/smoke-test-ingest-disabled/src/test/java/org/elasticsearch/smoketest/IngestDisabledIT.java index e162807baca..d1cb62cc54b 100644 --- a/qa/smoke-test-ingest-disabled/src/test/java/org/elasticsearch/smoketest/IngestDisabledIT.java +++ b/qa/smoke-test-ingest-disabled/src/test/java/org/elasticsearch/smoketest/IngestDisabledIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.smoketest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class IngestDisabledIT extends ESRestTestCase { +public class IngestDisabledIT extends ESClientYamlSuiteTestCase { public IngestDisabledIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class IngestDisabledIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/smoketest/IngestWithDependenciesIT.java b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/smoketest/IngestWithDependenciesIT.java index b9873214aa2..1ed23e7add5 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/smoketest/IngestWithDependenciesIT.java +++ 
b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/smoketest/IngestWithDependenciesIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.smoketest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class IngestWithDependenciesIT extends ESRestTestCase { +public class IngestWithDependenciesIT extends ESClientYamlSuiteTestCase { public IngestWithDependenciesIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class IngestWithDependenciesIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/qa/smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiIT.java b/qa/smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiIT.java index 75c4633e632..78d7acd8c24 100644 --- a/qa/smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiIT.java +++ b/qa/smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.smoketest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class 
SmokeTestMultiIT extends ESRestTestCase { +public class SmokeTestMultiIT extends ESClientYamlSuiteTestCase { public SmokeTestMultiIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class SmokeTestMultiIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/qa/smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsIT.java b/qa/smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsIT.java index 6e0243b2a04..61903e62993 100644 --- a/qa/smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsIT.java +++ b/qa/smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsIT.java @@ -21,13 +21,13 @@ package org.elasticsearch.smoketest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class SmokeTestPluginsIT extends ESRestTestCase { +public class SmokeTestPluginsIT extends ESClientYamlSuiteTestCase { public SmokeTestPluginsIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); @@ -35,7 +35,7 @@ public class SmokeTestPluginsIT extends ESRestTestCase { @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessIT.java 
b/qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessIT.java index 7857259a60d..b7061432325 100644 --- a/qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessIT.java +++ b/qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessIT.java @@ -21,19 +21,19 @@ package org.elasticsearch.smoketest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class SmokeTestReindexWithPainlessIT extends ESRestTestCase { +public class SmokeTestReindexWithPainlessIT extends ESClientYamlSuiteTestCase { public SmokeTestReindexWithPainlessIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java index 7103cda945b..d5c14935ecd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java +++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java @@ -24,7 +24,7 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESTestCase; -import 
org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.junit.internal.AssumptionViolatedException; import org.junit.runner.Description; import org.junit.runner.notification.Failure; @@ -37,9 +37,9 @@ import static com.carrotsearch.randomizedtesting.SysGlobals.SYSPROP_ITERATIONS; import static com.carrotsearch.randomizedtesting.SysGlobals.SYSPROP_PREFIX; import static com.carrotsearch.randomizedtesting.SysGlobals.SYSPROP_TESTMETHOD; import static org.elasticsearch.test.ESIntegTestCase.TESTS_CLUSTER; -import static org.elasticsearch.test.rest.ESRestTestCase.REST_TESTS_BLACKLIST; -import static org.elasticsearch.test.rest.ESRestTestCase.REST_TESTS_SPEC; -import static org.elasticsearch.test.rest.ESRestTestCase.REST_TESTS_SUITE; +import static org.elasticsearch.test.rest.ESClientYamlSuiteTestCase.REST_TESTS_BLACKLIST; +import static org.elasticsearch.test.rest.ESClientYamlSuiteTestCase.REST_TESTS_SPEC; +import static org.elasticsearch.test.rest.ESClientYamlSuiteTestCase.REST_TESTS_SUITE; /** * A {@link RunListener} that emits a command you can use to re-run a failing test with the failing random seed to @@ -81,9 +81,9 @@ public class ReproduceInfoPrinter extends RunListener { GradleMessageBuilder gradleMessageBuilder = new GradleMessageBuilder(b); gradleMessageBuilder.appendAllOpts(failure.getDescription()); - //Rest tests are a special case as they allow for additional parameters - if (ESRestTestCase.class.isAssignableFrom(failure.getDescription().getTestClass())) { - gradleMessageBuilder.appendRestTestsProperties(); + // Client yaml suite tests are a special case as they allow for additional parameters + if (ESClientYamlSuiteTestCase.class.isAssignableFrom(failure.getDescription().getTestClass())) { + gradleMessageBuilder.appendClientYamlSuiteProperties(); } System.err.println(b.toString()); @@ -152,7 +152,7 @@ public class ReproduceInfoPrinter extends RunListener { return this; } - public 
ReproduceErrorMessageBuilder appendRestTestsProperties() { + public ReproduceErrorMessageBuilder appendClientYamlSuiteProperties() { return appendProperties(REST_TESTS_SUITE, REST_TESTS_SPEC, REST_TESTS_BLACKLIST); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESClientYamlSuiteTestCase.java new file mode 100644 index 00000000000..2611e20d447 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESClientYamlSuiteTestCase.java @@ -0,0 +1,319 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.test.rest; + +import com.carrotsearch.randomizedtesting.RandomizedTest; + +import org.apache.lucene.util.IOUtils; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.test.rest.parser.RestTestParseException; +import org.elasticsearch.test.rest.parser.RestTestSuiteParser; +import org.elasticsearch.test.rest.section.DoSection; +import org.elasticsearch.test.rest.section.ExecutableSection; +import org.elasticsearch.test.rest.section.RestTestSuite; +import org.elasticsearch.test.rest.section.SkipSection; +import org.elasticsearch.test.rest.section.TestSection; +import org.elasticsearch.test.rest.spec.RestApi; +import org.elasticsearch.test.rest.spec.RestSpec; +import org.elasticsearch.test.rest.support.FileUtils; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.file.FileSystem; +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * Runs a suite of yaml tests shared with all the official Elasticsearch clients against against an elasticsearch cluster. + */ +public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { + + /** + * Property that allows to control which REST tests get run. Supports comma separated list of tests + * or directories that contain tests e.g. 
-Dtests.rest.suite=index,get,create/10_with_id + */ + public static final String REST_TESTS_SUITE = "tests.rest.suite"; + /** + * Property that allows to blacklist some of the REST tests based on a comma separated list of globs + * e.g. -Dtests.rest.blacklist=get/10_basic/* + */ + public static final String REST_TESTS_BLACKLIST = "tests.rest.blacklist"; + /** + * Property that allows to control whether spec validation is enabled or not (default true). + */ + public static final String REST_TESTS_VALIDATE_SPEC = "tests.rest.validate_spec"; + /** + * Property that allows to control where the REST spec files need to be loaded from + */ + public static final String REST_TESTS_SPEC = "tests.rest.spec"; + + public static final String REST_LOAD_PACKAGED_TESTS = "tests.rest.load_packaged"; + + private static final String DEFAULT_TESTS_PATH = "/rest-api-spec/test"; + private static final String DEFAULT_SPEC_PATH = "/rest-api-spec/api"; + + /** + * This separator pattern matches ',' except it is preceded by a '\'. + * This allows us to support ',' within paths when it is escaped with a slash. + * + * For example, the path string "/a/b/c\,d/e/f,/foo/bar,/baz" is separated to "/a/b/c\,d/e/f", "/foo/bar" and "/baz". + * + * For reference, this regular expression feature is known as zero-width negative look-behind. + * + */ + private static final String PATHS_SEPARATOR = "(? 
blacklistPathMatchers = new ArrayList<>(); + private static RestTestExecutionContext restTestExecutionContext; + private static RestTestExecutionContext adminExecutionContext; + + private final RestTestCandidate testCandidate; + + public ESClientYamlSuiteTestCase(RestTestCandidate testCandidate) { + this.testCandidate = testCandidate; + String[] blacklist = resolvePathsProperty(REST_TESTS_BLACKLIST, null); + for (String entry : blacklist) { + this.blacklistPathMatchers.add(new BlacklistedPathPatternMatcher(entry)); + } + + } + + @Override + protected void afterIfFailed(List errors) { + logger.info("Stash dump on failure [{}]", XContentHelper.toString(restTestExecutionContext.stash())); + super.afterIfFailed(errors); + } + + public static Iterable createParameters(int id, int count) throws IOException, RestTestParseException { + //parse tests only if rest test group is enabled, otherwise rest tests might not even be available on file system + List restTestCandidates = collectTestCandidates(id, count); + List objects = new ArrayList<>(); + for (RestTestCandidate restTestCandidate : restTestCandidates) { + objects.add(new Object[]{restTestCandidate}); + } + return objects; + } + + private static List collectTestCandidates(int id, int count) throws RestTestParseException, IOException { + List testCandidates = new ArrayList<>(); + FileSystem fileSystem = getFileSystem(); + // don't make a try-with, getFileSystem returns null + // ... 
and you can't close() the default filesystem + try { + String[] paths = resolvePathsProperty(REST_TESTS_SUITE, DEFAULT_TESTS_PATH); + Map> yamlSuites = FileUtils.findYamlSuites(fileSystem, DEFAULT_TESTS_PATH, paths); + RestTestSuiteParser restTestSuiteParser = new RestTestSuiteParser(); + //yaml suites are grouped by directory (effectively by api) + for (String api : yamlSuites.keySet()) { + List yamlFiles = new ArrayList<>(yamlSuites.get(api)); + for (Path yamlFile : yamlFiles) { + String key = api + yamlFile.getFileName().toString(); + if (mustExecute(key, id, count)) { + RestTestSuite restTestSuite = restTestSuiteParser.parse(api, yamlFile); + for (TestSection testSection : restTestSuite.getTestSections()) { + testCandidates.add(new RestTestCandidate(restTestSuite, testSection)); + } + } + } + } + } finally { + IOUtils.close(fileSystem); + } + + //sort the candidates so they will always be in the same order before being shuffled, for repeatability + Collections.sort(testCandidates, new Comparator() { + @Override + public int compare(RestTestCandidate o1, RestTestCandidate o2) { + return o1.getTestPath().compareTo(o2.getTestPath()); + } + }); + + return testCandidates; + } + + private static boolean mustExecute(String test, int id, int count) { + int hash = (int) (Math.abs((long)test.hashCode()) % count); + return hash == id; + } + + private static String[] resolvePathsProperty(String propertyName, String defaultValue) { + String property = System.getProperty(propertyName); + if (!Strings.hasLength(property)) { + return defaultValue == null ? Strings.EMPTY_ARRAY : new String[]{defaultValue}; + } else { + return property.split(PATHS_SEPARATOR); + } + } + + /** + * Returns a new FileSystem to read REST resources, or null if they + * are available from classpath. 
+ */ + @SuppressForbidden(reason = "proper use of URL, hack around a JDK bug") + static FileSystem getFileSystem() throws IOException { + // REST suite handling is currently complicated, with lots of filtering and so on + // For now, to work embedded in a jar, return a ZipFileSystem over the jar contents. + URL codeLocation = FileUtils.class.getProtectionDomain().getCodeSource().getLocation(); + boolean loadPackaged = RandomizedTest.systemPropertyAsBoolean(REST_LOAD_PACKAGED_TESTS, true); + if (codeLocation.getFile().endsWith(".jar") && loadPackaged) { + try { + // hack around a bug in the zipfilesystem implementation before java 9, + // its checkWritable was incorrect and it won't work without write permissions. + // if we add the permission, it will open jars r/w, which is too scary! so copy to a safe r-w location. + Path tmp = Files.createTempFile(null, ".jar"); + try (InputStream in = codeLocation.openStream()) { + Files.copy(in, tmp, StandardCopyOption.REPLACE_EXISTING); + } + return FileSystems.newFileSystem(new URI("jar:" + tmp.toUri()), Collections.emptyMap()); + } catch (URISyntaxException e) { + throw new IOException("couldn't open zipfilesystem: ", e); + } + } else { + return null; + } + } + + @BeforeClass + public static void initExecutionContext() throws IOException { + String[] specPaths = resolvePathsProperty(REST_TESTS_SPEC, DEFAULT_SPEC_PATH); + RestSpec restSpec = null; + FileSystem fileSystem = getFileSystem(); + // don't make a try-with, getFileSystem returns null + // ... 
and you can't close() the default filesystem + try { + restSpec = RestSpec.parseFrom(fileSystem, DEFAULT_SPEC_PATH, specPaths); + } finally { + IOUtils.close(fileSystem); + } + validateSpec(restSpec); + restTestExecutionContext = new RestTestExecutionContext(restSpec); + adminExecutionContext = new RestTestExecutionContext(restSpec); + } + + protected RestTestExecutionContext getAdminExecutionContext() { + return adminExecutionContext; + } + + private static void validateSpec(RestSpec restSpec) { + boolean validateSpec = RandomizedTest.systemPropertyAsBoolean(REST_TESTS_VALIDATE_SPEC, true); + if (validateSpec) { + StringBuilder errorMessage = new StringBuilder(); + for (RestApi restApi : restSpec.getApis()) { + if (restApi.getMethods().contains("GET") && restApi.isBodySupported()) { + if (!restApi.getMethods().contains("POST")) { + errorMessage.append("\n- ").append(restApi.getName()).append(" supports GET with a body but doesn't support POST"); + } + } + } + if (errorMessage.length() > 0) { + throw new IllegalArgumentException(errorMessage.toString()); + } + } + } + + @AfterClass + public static void clearStatic() { + restTestExecutionContext = null; + adminExecutionContext = null; + } + + @Before + public void reset() throws IOException { + // admin context must be available for @After always, regardless of whether the test was blacklisted + adminExecutionContext.initClient(adminClient(), getClusterHosts()); + adminExecutionContext.clear(); + + //skip test if it matches one of the blacklist globs + for (BlacklistedPathPatternMatcher blacklistedPathMatcher : blacklistPathMatchers) { + String testPath = testCandidate.getSuitePath() + "/" + testCandidate.getTestSection().getName(); + assumeFalse("[" + testCandidate.getTestPath() + "] skipped, reason: blacklisted", blacklistedPathMatcher + .isSuffixMatch(testPath)); + } + //The client needs non static info to get initialized, therefore it can't be initialized in the before class + 
restTestExecutionContext.initClient(client(), getClusterHosts()); + restTestExecutionContext.clear(); + + //skip test if the whole suite (yaml file) is disabled + assumeFalse(buildSkipMessage(testCandidate.getSuitePath(), testCandidate.getSetupSection().getSkipSection()), + testCandidate.getSetupSection().getSkipSection().skip(restTestExecutionContext.esVersion())); + //skip test if the whole suite (yaml file) is disabled + assumeFalse(buildSkipMessage(testCandidate.getSuitePath(), testCandidate.getTeardownSection().getSkipSection()), + testCandidate.getTeardownSection().getSkipSection().skip(restTestExecutionContext.esVersion())); + //skip test if test section is disabled + assumeFalse(buildSkipMessage(testCandidate.getTestPath(), testCandidate.getTestSection().getSkipSection()), + testCandidate.getTestSection().getSkipSection().skip(restTestExecutionContext.esVersion())); + } + + private static String buildSkipMessage(String description, SkipSection skipSection) { + StringBuilder messageBuilder = new StringBuilder(); + if (skipSection.isVersionCheck()) { + messageBuilder.append("[").append(description).append("] skipped, reason: [").append(skipSection.getReason()).append("] "); + } else { + messageBuilder.append("[").append(description).append("] skipped, reason: features ") + .append(skipSection.getFeatures()).append(" not supported"); + } + return messageBuilder.toString(); + } + + public void test() throws IOException { + //let's check that there is something to run, otherwise there might be a problem with the test section + if (testCandidate.getTestSection().getExecutableSections().size() == 0) { + throw new IllegalArgumentException("No executable sections loaded for [" + testCandidate.getTestPath() + "]"); + } + + if (!testCandidate.getSetupSection().isEmpty()) { + logger.debug("start setup test [{}]", testCandidate.getTestPath()); + for (DoSection doSection : testCandidate.getSetupSection().getDoSections()) { + doSection.execute(restTestExecutionContext); + 
} + logger.debug("end setup test [{}]", testCandidate.getTestPath()); + } + + restTestExecutionContext.clear(); + + try { + for (ExecutableSection executableSection : testCandidate.getTestSection().getExecutableSections()) { + executableSection.execute(restTestExecutionContext); + } + } finally { + logger.debug("start teardown test [{}]", testCandidate.getTestPath()); + for (DoSection doSection : testCandidate.getTeardownSection().getDoSections()) { + doSection.execute(restTestExecutionContext); + } + logger.debug("end teardown test [{}]", testCandidate.getTestPath()); + } + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index bc16f4f7477..573c301105a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -19,289 +19,149 @@ package org.elasticsearch.test.rest; -import com.carrotsearch.randomizedtesting.RandomizedTest; +import org.apache.http.Header; +import org.apache.http.HttpHost; +import org.apache.http.message.BasicHeader; +import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; +import org.apache.http.ssl.SSLContexts; import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; +import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.XContentParser; +import 
org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.rest.client.RestTestResponse; -import org.elasticsearch.test.rest.client.RestTestResponseException; -import org.elasticsearch.test.rest.parser.RestTestParseException; -import org.elasticsearch.test.rest.parser.RestTestSuiteParser; -import org.elasticsearch.test.rest.section.DoSection; -import org.elasticsearch.test.rest.section.ExecutableSection; -import org.elasticsearch.test.rest.section.RestTestSuite; -import org.elasticsearch.test.rest.section.SkipSection; -import org.elasticsearch.test.rest.section.TestSection; -import org.elasticsearch.test.rest.spec.RestApi; -import org.elasticsearch.test.rest.spec.RestSpec; -import org.elasticsearch.test.rest.support.FileUtils; import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; import java.io.IOException; import java.io.InputStream; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.nio.file.FileSystem; -import java.nio.file.FileSystems; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.StandardCopyOption; +import java.security.KeyManagementException; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.cert.CertificateException; import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; -import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; +import javax.net.ssl.SSLContext; + import static java.util.Collections.sort; +import static java.util.Collections.unmodifiableList; /** - * Runs the clients test suite against an elasticsearch cluster. 
+ * Superclass for tests that interact with an external test cluster using Elasticsearch's {@link RestClient}. */ -public abstract class ESRestTestCase extends ESTestCase { +public class ESRestTestCase extends ESTestCase { + public static final String TRUSTSTORE_PATH = "truststore.path"; + public static final String TRUSTSTORE_PASSWORD = "truststore.password"; /** - * Property that allows to control which REST tests get run. Supports comma separated list of tests - * or directories that contain tests e.g. -Dtests.rest.suite=index,get,create/10_with_id + * Convert the entity from a {@link Response} into a map of maps. */ - public static final String REST_TESTS_SUITE = "tests.rest.suite"; - /** - * Property that allows to blacklist some of the REST tests based on a comma separated list of globs - * e.g. -Dtests.rest.blacklist=get/10_basic/* - */ - public static final String REST_TESTS_BLACKLIST = "tests.rest.blacklist"; - /** - * Property that allows to control whether spec validation is enabled or not (default true). - */ - public static final String REST_TESTS_VALIDATE_SPEC = "tests.rest.validate_spec"; - /** - * Property that allows to control where the REST spec files need to be loaded from - */ - public static final String REST_TESTS_SPEC = "tests.rest.spec"; - - public static final String REST_LOAD_PACKAGED_TESTS = "tests.rest.load_packaged"; - - private static final String DEFAULT_TESTS_PATH = "/rest-api-spec/test"; - private static final String DEFAULT_SPEC_PATH = "/rest-api-spec/api"; - - /** - * This separator pattern matches ',' except it is preceded by a '\'. - * This allows us to support ',' within paths when it is escaped with a slash. - * - * For example, the path string "/a/b/c\,d/e/f,/foo/bar,/baz" is separated to "/a/b/c\,d/e/f", "/foo/bar" and "/baz". - * - * For reference, this regular expression feature is known as zero-width negative look-behind. - * - */ - private static final String PATHS_SEPARATOR = "(? 
blacklistPathMatchers = new ArrayList<>(); - private final URL[] clusterUrls; - private static RestTestExecutionContext restTestExecutionContext; - private static RestTestExecutionContext adminExecutionContext; - - private final RestTestCandidate testCandidate; - - public ESRestTestCase(RestTestCandidate testCandidate) { - this.testCandidate = testCandidate; - String[] blacklist = resolvePathsProperty(REST_TESTS_BLACKLIST, null); - for (String entry : blacklist) { - this.blacklistPathMatchers.add(new BlacklistedPathPatternMatcher(entry)); + public static Map entityAsMap(Response response) throws IOException { + XContentType xContentType = XContentType.fromMediaTypeOrFormat(response.getEntity().getContentType().getValue()); + try (XContentParser parser = xContentType.xContent().createParser(response.getEntity().getContent())) { + return parser.map(); } + } + + private final List clusterHosts; + /** + * A client for the running Elasticsearch cluster. Lazily initialized on first use. + */ + private final RestClient client; + /** + * A client for the running Elasticsearch cluster configured to take test administrative actions like remove all indexes after the test + * completes. Lazily initialized on first use. 
+ */ + private final RestClient adminClient; + + public ESRestTestCase() { String cluster = System.getProperty("tests.rest.cluster"); if (cluster == null) { - throw new RuntimeException("Must specify tests.rest.cluster for rest tests"); + throw new RuntimeException("Must specify [tests.rest.cluster] system property with a comma delimited list of [host:port] " + + "to which to send REST requests"); } String[] stringUrls = cluster.split(","); - clusterUrls = new URL[stringUrls.length]; - int i = 0; - try { - for (String stringUrl : stringUrls) { - clusterUrls[i++] = new URL("http://" + stringUrl); + List clusterHosts = new ArrayList<>(stringUrls.length); + for (String stringUrl : stringUrls) { + int portSeparator = stringUrl.lastIndexOf(':'); + if (portSeparator < 0) { + throw new IllegalArgumentException("Illegal cluster url [" + stringUrl + "]"); } + String host = stringUrl.substring(0, portSeparator); + int port = Integer.valueOf(stringUrl.substring(portSeparator + 1)); + clusterHosts.add(new HttpHost(host, port, getProtocol())); + } + this.clusterHosts = unmodifiableList(clusterHosts); + try { + client = buildClient(restClientSettings()); + adminClient = buildClient(restAdminSettings()); } catch (IOException e) { - throw new RuntimeException("Failed to parse cluster addresses for rest test", e); - } - } - - @Override - protected void afterIfFailed(List errors) { - logger.info("Stash dump on failure [{}]", XContentHelper.toString(restTestExecutionContext.stash())); - super.afterIfFailed(errors); - } - - public static Iterable createParameters(int id, int count) throws IOException, RestTestParseException { - //parse tests only if rest test group is enabled, otherwise rest tests might not even be available on file system - List restTestCandidates = collectTestCandidates(id, count); - List objects = new ArrayList<>(); - for (RestTestCandidate restTestCandidate : restTestCandidates) { - objects.add(new Object[]{restTestCandidate}); - } - return objects; - } - - 
private static List collectTestCandidates(int id, int count) throws RestTestParseException, IOException { - List testCandidates = new ArrayList<>(); - FileSystem fileSystem = getFileSystem(); - // don't make a try-with, getFileSystem returns null - // ... and you can't close() the default filesystem - try { - String[] paths = resolvePathsProperty(REST_TESTS_SUITE, DEFAULT_TESTS_PATH); - Map> yamlSuites = FileUtils.findYamlSuites(fileSystem, DEFAULT_TESTS_PATH, paths); - RestTestSuiteParser restTestSuiteParser = new RestTestSuiteParser(); - //yaml suites are grouped by directory (effectively by api) - for (String api : yamlSuites.keySet()) { - List yamlFiles = new ArrayList<>(yamlSuites.get(api)); - for (Path yamlFile : yamlFiles) { - String key = api + yamlFile.getFileName().toString(); - if (mustExecute(key, id, count)) { - RestTestSuite restTestSuite = restTestSuiteParser.parse(api, yamlFile); - for (TestSection testSection : restTestSuite.getTestSections()) { - testCandidates.add(new RestTestCandidate(restTestSuite, testSection)); - } - } - } - } - } finally { - IOUtils.close(fileSystem); - } - - //sort the candidates so they will always be in the same order before being shuffled, for repeatability - Collections.sort(testCandidates, new Comparator() { - @Override - public int compare(RestTestCandidate o1, RestTestCandidate o2) { - return o1.getTestPath().compareTo(o2.getTestPath()); - } - }); - - return testCandidates; - } - - private static boolean mustExecute(String test, int id, int count) { - int hash = (int) (Math.abs((long)test.hashCode()) % count); - return hash == id; - } - - private static String[] resolvePathsProperty(String propertyName, String defaultValue) { - String property = System.getProperty(propertyName); - if (!Strings.hasLength(property)) { - return defaultValue == null ? 
Strings.EMPTY_ARRAY : new String[]{defaultValue}; - } else { - return property.split(PATHS_SEPARATOR); + // Wrap the IOException so children don't have to declare a constructor just to rethrow it. + throw new RuntimeException("Error building clients", e); } } /** - * Returns a new FileSystem to read REST resources, or null if they - * are available from classpath. + * Clean up after the test case. */ - @SuppressForbidden(reason = "proper use of URL, hack around a JDK bug") - static FileSystem getFileSystem() throws IOException { - // REST suite handling is currently complicated, with lots of filtering and so on - // For now, to work embedded in a jar, return a ZipFileSystem over the jar contents. - URL codeLocation = FileUtils.class.getProtectionDomain().getCodeSource().getLocation(); - boolean loadPackaged = RandomizedTest.systemPropertyAsBoolean(REST_LOAD_PACKAGED_TESTS, true); - if (codeLocation.getFile().endsWith(".jar") && loadPackaged) { - try { - // hack around a bug in the zipfilesystem implementation before java 9, - // its checkWritable was incorrect and it won't work without write permissions. - // if we add the permission, it will open jars r/w, which is too scary! so copy to a safe r-w location. - Path tmp = Files.createTempFile(null, ".jar"); - try (InputStream in = codeLocation.openStream()) { - Files.copy(in, tmp, StandardCopyOption.REPLACE_EXISTING); - } - return FileSystems.newFileSystem(new URI("jar:" + tmp.toUri()), Collections.emptyMap()); - } catch (URISyntaxException e) { - throw new IOException("couldn't open zipfilesystem: ", e); - } - } else { - return null; - } - } - - @BeforeClass - public static void initExecutionContext() throws IOException { - String[] specPaths = resolvePathsProperty(REST_TESTS_SPEC, DEFAULT_SPEC_PATH); - RestSpec restSpec = null; - FileSystem fileSystem = getFileSystem(); - // don't make a try-with, getFileSystem returns null - // ... 
and you can't close() the default filesystem - try { - restSpec = RestSpec.parseFrom(fileSystem, DEFAULT_SPEC_PATH, specPaths); - } finally { - IOUtils.close(fileSystem); - } - validateSpec(restSpec); - restTestExecutionContext = new RestTestExecutionContext(restSpec); - adminExecutionContext = new RestTestExecutionContext(restSpec); - } - - protected RestTestExecutionContext getAdminExecutionContext() { - return adminExecutionContext; - } - - private static void validateSpec(RestSpec restSpec) { - boolean validateSpec = RandomizedTest.systemPropertyAsBoolean(REST_TESTS_VALIDATE_SPEC, true); - if (validateSpec) { - StringBuilder errorMessage = new StringBuilder(); - for (RestApi restApi : restSpec.getApis()) { - if (restApi.getMethods().contains("GET") && restApi.isBodySupported()) { - if (!restApi.getMethods().contains("POST")) { - errorMessage.append("\n- ").append(restApi.getName()).append(" supports GET with a body but doesn't support POST"); - } - } - } - if (errorMessage.length() > 0) { - throw new IllegalArgumentException(errorMessage.toString()); - } - } - } - @After - public void wipeCluster() throws IOException { + public final void after() throws Exception { + wipeCluster(); + logIfThereAreRunningTasks(); + closeClients(); + } + + /** + * Get a client, building it if it hasn't been built for this test. + */ + protected final RestClient client() { + return client; + } + + /** + * Get the client used for test administrative actions. Do not use this while writing a test. Only use it for cleaning up after tests. 
+ */ + protected final RestClient adminClient() { + return adminClient; + } + + private void wipeCluster() throws IOException { // wipe indices - Map deleteIndicesArgs = new HashMap<>(); - deleteIndicesArgs.put("index", "*"); try { - adminExecutionContext.callApi("indices.delete", deleteIndicesArgs, Collections.emptyList(), Collections.emptyMap()); - } catch (RestTestResponseException e) { + adminClient().performRequest("DELETE", "*"); + } catch (ResponseException e) { // 404 here just means we had no indexes - if (e.getResponseException().getResponse().getStatusLine().getStatusCode() != 404) { + if (e.getResponse().getStatusLine().getStatusCode() != 404) { throw e; } } // wipe index templates - Map deleteTemplatesArgs = new HashMap<>(); - deleteTemplatesArgs.put("name", "*"); - adminExecutionContext.callApi("indices.delete_template", deleteTemplatesArgs, Collections.emptyList(), Collections.emptyMap()); + adminClient().performRequest("DELETE", "_template/*"); // wipe snapshots - Map deleteSnapshotsArgs = new HashMap<>(); - deleteSnapshotsArgs.put("repository", "*"); - adminExecutionContext.callApi("snapshot.delete_repository", deleteSnapshotsArgs, Collections.emptyList(), Collections.emptyMap()); + // Technically this deletes all repositories and leave the snapshots in the repository. OK. + adminClient().performRequest("DELETE", "_snapshot/*"); } /** * Logs a message if there are still running tasks. The reasoning is that any tasks still running are state the is trying to bleed into * other tests. 
*/ - @After - public void logIfThereAreRunningTasks() throws IOException { - RestTestResponse tasks = adminExecutionContext.callApi("tasks.list", emptyMap(), emptyList(), emptyMap()); - Set runningTasks = runningTasks(tasks); + private void logIfThereAreRunningTasks() throws InterruptedException, IOException { + Set runningTasks = runningTasks(adminClient().performRequest("GET", "_tasks")); // Ignore the task list API - it doens't count against us runningTasks.remove(ListTasksAction.NAME); runningTasks.remove(ListTasksAction.NAME + "[n]"); @@ -318,14 +178,8 @@ public abstract class ESRestTestCase extends ESTestCase { */ } - @AfterClass - public static void close() { - if (restTestExecutionContext != null) { - restTestExecutionContext.close(); - adminExecutionContext.close(); - restTestExecutionContext = null; - adminExecutionContext = null; - } + private void closeClients() throws IOException { + IOUtils.close(client, adminClient); } /** @@ -335,82 +189,69 @@ public abstract class ESRestTestCase extends ESTestCase { return Settings.EMPTY; } - /** Returns the REST client settings used for admin actions like cleaning up after the test has completed. */ + /** + * Returns the REST client settings used for admin actions like cleaning up after the test has completed. 
+ */ protected Settings restAdminSettings() { return restClientSettings(); // default to the same client settings } - @Before - public void reset() throws Exception { - // admin context must be available for @After always, regardless of whether the test was blacklisted - adminExecutionContext.initClient(clusterUrls, restAdminSettings()); - adminExecutionContext.clear(); - - //skip test if it matches one of the blacklist globs - for (BlacklistedPathPatternMatcher blacklistedPathMatcher : blacklistPathMatchers) { - String testPath = testCandidate.getSuitePath() + "/" + testCandidate.getTestSection().getName(); - assumeFalse("[" + testCandidate.getTestPath() + "] skipped, reason: blacklisted", blacklistedPathMatcher - .isSuffixMatch(testPath)); - } - //The client needs non static info to get initialized, therefore it can't be initialized in the before class - restTestExecutionContext.initClient(clusterUrls, restClientSettings()); - restTestExecutionContext.clear(); - - //skip test if the whole suite (yaml file) is disabled - assumeFalse(buildSkipMessage(testCandidate.getSuitePath(), testCandidate.getSetupSection().getSkipSection()), - testCandidate.getSetupSection().getSkipSection().skip(restTestExecutionContext.esVersion())); - //skip test if the whole suite (yaml file) is disabled - assumeFalse(buildSkipMessage(testCandidate.getSuitePath(), testCandidate.getTeardownSection().getSkipSection()), - testCandidate.getTeardownSection().getSkipSection().skip(restTestExecutionContext.esVersion())); - //skip test if test section is disabled - assumeFalse(buildSkipMessage(testCandidate.getTestPath(), testCandidate.getTestSection().getSkipSection()), - testCandidate.getTestSection().getSkipSection().skip(restTestExecutionContext.esVersion())); + /** + * Get the list of hosts in the cluster. 
+ */ + protected final List getClusterHosts() { + return clusterHosts; } - private static String buildSkipMessage(String description, SkipSection skipSection) { - StringBuilder messageBuilder = new StringBuilder(); - if (skipSection.isVersionCheck()) { - messageBuilder.append("[").append(description).append("] skipped, reason: [").append(skipSection.getReason()).append("] "); - } else { - messageBuilder.append("[").append(description).append("] skipped, reason: features ") - .append(skipSection.getFeatures()).append(" not supported"); - } - return messageBuilder.toString(); + /** + * Override this to switch to testing https. + */ + protected String getProtocol() { + return "http"; } - public void test() throws IOException { - //let's check that there is something to run, otherwise there might be a problem with the test section - if (testCandidate.getTestSection().getExecutableSections().size() == 0) { - throw new IllegalArgumentException("No executable sections loaded for [" + testCandidate.getTestPath() + "]"); + private RestClient buildClient(Settings settings) throws IOException { + RestClientBuilder builder = RestClient.builder(clusterHosts.toArray(new HttpHost[0])).setMaxRetryTimeoutMillis(30000) + .setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setSocketTimeout(30000)); + String keystorePath = settings.get(TRUSTSTORE_PATH); + if (keystorePath != null) { + final String keystorePass = settings.get(TRUSTSTORE_PASSWORD); + if (keystorePass == null) { + throw new IllegalStateException(TRUSTSTORE_PATH + " is provided but not " + TRUSTSTORE_PASSWORD); + } + Path path = PathUtils.get(keystorePath); + if (!Files.exists(path)) { + throw new IllegalStateException(TRUSTSTORE_PATH + " is set but points to a non-existing file"); + } + try { + KeyStore keyStore = KeyStore.getInstance("jks"); + try (InputStream is = Files.newInputStream(path)) { + keyStore.load(is, keystorePass.toCharArray()); + } + SSLContext sslcontext = 
SSLContexts.custom().loadTrustMaterial(keyStore, null).build(); + SSLIOSessionStrategy sessionStrategy = new SSLIOSessionStrategy(sslcontext); + builder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder.setSSLStrategy(sessionStrategy)); + } catch (KeyStoreException|NoSuchAlgorithmException|KeyManagementException|CertificateException e) { + throw new RuntimeException("Error setting up ssl", e); + } } - if (!testCandidate.getSetupSection().isEmpty()) { - logger.debug("start setup test [{}]", testCandidate.getTestPath()); - for (DoSection doSection : testCandidate.getSetupSection().getDoSections()) { - doSection.execute(restTestExecutionContext); + try (ThreadContext threadContext = new ThreadContext(settings)) { + Header[] defaultHeaders = new Header[threadContext.getHeaders().size()]; + int i = 0; + for (Map.Entry entry : threadContext.getHeaders().entrySet()) { + defaultHeaders[i++] = new BasicHeader(entry.getKey(), entry.getValue()); } - logger.debug("end setup test [{}]", testCandidate.getTestPath()); - } - - restTestExecutionContext.clear(); - - try { - for (ExecutableSection executableSection : testCandidate.getTestSection().getExecutableSections()) { - executableSection.execute(restTestExecutionContext); - } - } finally { - logger.debug("start teardown test [{}]", testCandidate.getTestPath()); - for (DoSection doSection : testCandidate.getTeardownSection().getDoSections()) { - doSection.execute(restTestExecutionContext); - } - logger.debug("end teardown test [{}]", testCandidate.getTestPath()); + builder.setDefaultHeaders(defaultHeaders); } + return builder.build(); } @SuppressWarnings("unchecked") - public Set runningTasks(RestTestResponse response) throws IOException { + private Set runningTasks(Response response) throws IOException { Set runningTasks = new HashSet<>(); - Map nodes = (Map) response.evaluate("nodes"); + + Map nodes = (Map) entityAsMap(response).get("nodes"); for (Map.Entry node : nodes.entrySet()) { Map nodeInfo = (Map) 
node.getValue(); Map nodeTasks = (Map) nodeInfo.get("tasks"); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java index 7ed46be4d8a..59d45bc0a88 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java @@ -18,19 +18,18 @@ */ package org.elasticsearch.test.rest; +import org.apache.http.HttpHost; import org.elasticsearch.Version; +import org.elasticsearch.client.RestClient; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.test.rest.client.RestTestClient; import org.elasticsearch.test.rest.client.RestTestResponse; import org.elasticsearch.test.rest.client.RestTestResponseException; import org.elasticsearch.test.rest.spec.RestSpec; -import java.io.Closeable; import java.io.IOException; -import java.net.URL; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -41,7 +40,7 @@ import java.util.Map; * Caches the last obtained test response and allows to stash part of it within variables * that can be used as input values in following requests. */ -public class RestTestExecutionContext implements Closeable { +public class RestTestExecutionContext { private static final ESLogger logger = Loggers.getLogger(RestTestExecutionContext.class); @@ -119,10 +118,8 @@ public class RestTestExecutionContext implements Closeable { /** * Creates the embedded REST client when needed. Needs to be called before each test. 
*/ - public void initClient(URL[] urls, Settings settings) throws Exception { - if (restTestClient == null) { - restTestClient = new RestTestClient(restSpec, settings, urls); - } + public void initClient(RestClient client, List hosts) throws IOException { + restTestClient = new RestTestClient(restSpec, client, hosts); } /** @@ -145,13 +142,4 @@ public class RestTestExecutionContext implements Closeable { return restTestClient.getEsVersion(); } - /** - * Closes the execution context and releases the underlying resources - */ - @Override - public void close() { - if (restTestClient != null) { - restTestClient.close(); - } - } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java index 56bca24532c..2fb0374d048 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java @@ -19,44 +19,27 @@ package org.elasticsearch.test.rest.client; import com.carrotsearch.randomizedtesting.RandomizedTest; + import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.message.BasicHeader; -import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; -import org.apache.http.ssl.SSLContexts; -import org.apache.lucene.util.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; -import 
org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.test.rest.spec.RestApi; import org.elasticsearch.test.rest.spec.RestSpec; -import javax.net.ssl.SSLContext; -import java.io.Closeable; import java.io.IOException; -import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; -import java.net.URL; -import java.nio.file.Files; -import java.nio.file.Path; -import java.security.KeyManagementException; -import java.security.KeyStore; -import java.security.KeyStoreException; -import java.security.NoSuchAlgorithmException; -import java.security.cert.CertificateException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -70,35 +53,29 @@ import java.util.Set; * Wraps a {@link RestClient} instance used to send the REST requests. * Holds the {@link RestSpec} used to translate api calls into REST calls */ -public class RestTestClient implements Closeable { - - public static final String PROTOCOL = "protocol"; - public static final String TRUSTSTORE_PATH = "truststore.path"; - public static final String TRUSTSTORE_PASSWORD = "truststore.password"; - +public class RestTestClient { private static final ESLogger logger = Loggers.getLogger(RestTestClient.class); - //query_string params that don't need to be declared in the spec, thay are supported by default + //query_string params that don't need to be declared in the spec, they are supported by default private static final Set ALWAYS_ACCEPTED_QUERY_STRING_PARAMS = Sets.newHashSet("pretty", "source", "filter_path"); private final RestSpec restSpec; private final RestClient restClient; private final Version esVersion; - public RestTestClient(RestSpec restSpec, Settings settings, URL[] urls) throws IOException { - assert urls.length > 0; + public RestTestClient(RestSpec restSpec, RestClient restClient, List hosts) throws IOException { + assert hosts.size() > 0; this.restSpec = restSpec; - 
this.restClient = createRestClient(urls, settings); - this.esVersion = readAndCheckVersion(urls); - logger.info("REST client initialized {}, elasticsearch version: [{}]", urls, esVersion); + this.restClient = restClient; + this.esVersion = readAndCheckVersion(hosts); } - private Version readAndCheckVersion(URL[] urls) throws IOException { + private Version readAndCheckVersion(List hosts) throws IOException { RestApi restApi = restApi("info"); assert restApi.getPaths().size() == 1; assert restApi.getMethods().size() == 1; String version = null; - for (URL ignored : urls) { + for (HttpHost ignored : hosts) { //we don't really use the urls here, we rely on the client doing round-robin to touch all the nodes in the cluster String method = restApi.getMethods().get(0); String endpoint = restApi.getPaths().get(0); @@ -264,56 +241,4 @@ public class RestTestClient implements Closeable { } return restApi; } - - private static RestClient createRestClient(URL[] urls, Settings settings) throws IOException { - String protocol = settings.get(PROTOCOL, "http"); - HttpHost[] hosts = new HttpHost[urls.length]; - for (int i = 0; i < hosts.length; i++) { - URL url = urls[i]; - hosts[i] = new HttpHost(url.getHost(), url.getPort(), protocol); - } - RestClientBuilder builder = RestClient.builder(hosts).setMaxRetryTimeoutMillis(30000) - .setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setSocketTimeout(30000)); - - String keystorePath = settings.get(TRUSTSTORE_PATH); - if (keystorePath != null) { - final String keystorePass = settings.get(TRUSTSTORE_PASSWORD); - if (keystorePass == null) { - throw new IllegalStateException(TRUSTSTORE_PATH + " is provided but not " + TRUSTSTORE_PASSWORD); - } - Path path = PathUtils.get(keystorePath); - if (!Files.exists(path)) { - throw new IllegalStateException(TRUSTSTORE_PATH + " is set but points to a non-existing file"); - } - try { - KeyStore keyStore = KeyStore.getInstance("jks"); - try (InputStream is = 
Files.newInputStream(path)) { - keyStore.load(is, keystorePass.toCharArray()); - } - SSLContext sslcontext = SSLContexts.custom().loadTrustMaterial(keyStore, null).build(); - SSLIOSessionStrategy sessionStrategy = new SSLIOSessionStrategy(sslcontext); - builder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder.setSSLStrategy(sessionStrategy)); - } catch (KeyStoreException|NoSuchAlgorithmException|KeyManagementException|CertificateException e) { - throw new RuntimeException(e); - } - } - - try (ThreadContext threadContext = new ThreadContext(settings)) { - Header[] defaultHeaders = new Header[threadContext.getHeaders().size()]; - int i = 0; - for (Map.Entry entry : threadContext.getHeaders().entrySet()) { - defaultHeaders[i++] = new BasicHeader(entry.getKey(), entry.getValue()); - } - builder.setDefaultHeaders(defaultHeaders); - } - return builder.build(); - } - - /** - * Closes the REST client and the underlying http client - */ - @Override - public void close() { - IOUtils.closeWhileHandlingException(restClient); - } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java b/test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java index 3c78d432f19..98bce54082f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java @@ -24,6 +24,8 @@ import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; import java.util.List; +import static java.util.Collections.unmodifiableList; + /** * Allows to register additional features supported by the tests runner. 
* This way any runner can add extra features and use proper skip sections to avoid @@ -34,8 +36,12 @@ import java.util.List; */ public final class Features { - private static final List SUPPORTED = - Arrays.asList("stash_in_path", "groovy_scripting", "headers", "embedded_stash_key", "yaml"); + private static final List SUPPORTED = unmodifiableList(Arrays.asList( + "embedded_stash_key", + "groovy_scripting", + "headers", + "stash_in_path", + "yaml")); private Features() { From 0ecaa6ec3cbe250545097ee792e4f32989dafa94 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 25 Jul 2016 17:05:40 -0700 Subject: [PATCH 59/93] Build: Allow license header check to be customized This change allows setting which license families are approved, as well as adding matchers for additional license types. --- .../precommit/LicenseHeadersTask.groovy | 91 ++++++++++++------- 1 file changed, 60 insertions(+), 31 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy index 9bb8c4ae83d..cee01f5380a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy @@ -23,6 +23,7 @@ import org.apache.rat.anttasks.SubstringLicenseMatcher import org.apache.rat.license.SimpleLicenseFamily import org.elasticsearch.gradle.AntTask import org.gradle.api.file.FileCollection +import org.gradle.api.tasks.Input import org.gradle.api.tasks.OutputFile import org.gradle.api.tasks.SourceSet @@ -44,6 +45,16 @@ public class LicenseHeadersTask extends AntTask { */ protected List javaFiles + /** Allowed license families for this project. */ + @Input + List approvedLicenses = ['Apache', 'Generated'] + + /** + * Additional license families that may be found. 
The key is the license category name (5 characters), + * followed by the family name and the value list of patterns to search for. + */ + protected Map additionalLicenses = [:] + LicenseHeadersTask() { description = "Checks sources for missing, incorrect, or unacceptable license headers" // Delay resolving the dependencies until after evaluation so we pick up generated sources @@ -53,6 +64,13 @@ public class LicenseHeadersTask extends AntTask { } } + public void additionalLicense(String categoryName, String familyName, String pattern) { + if (categoryName.length() != 5) { + throw new IllegalArgumentException("License category name must be exactly 5 characters, got ${categoryName}"); + } + additionalLicenses.put("${categoryName}${familyName}", pattern); + } + @Override protected void runAnt(AntBuilder ant) { ant.project.addTaskDefinition('ratReport', Report) @@ -64,43 +82,54 @@ public class LicenseHeadersTask extends AntTask { // run rat, going to the file List input = javaFiles ant.ratReport(reportFile: reportFile.absolutePath, addDefaultLicenseMatchers: true) { - for (FileCollection dirSet : input) { - for (File dir: dirSet.srcDirs) { - // sometimes these dirs don't exist, e.g. site-plugin has no actual java src/main... - if (dir.exists()) { - ant.fileset(dir: dir) - } + for (FileCollection dirSet : input) { + for (File dir: dirSet.srcDirs) { + // sometimes these dirs don't exist, e.g. site-plugin has no actual java src/main... + if (dir.exists()) { + ant.fileset(dir: dir) } } + } - // BSD 4-clause stuff (is disallowed below) - // we keep this here, in case someone adds BSD code for some reason, it should never be allowed. - substringMatcher(licenseFamilyCategory: "BSD4 ", - licenseFamilyName: "Original BSD License (with advertising clause)") { - pattern(substring: "All advertising materials") - } + // BSD 4-clause stuff (is disallowed below) + // we keep this here, in case someone adds BSD code for some reason, it should never be allowed. 
+ substringMatcher(licenseFamilyCategory: "BSD4 ", + licenseFamilyName: "Original BSD License (with advertising clause)") { + pattern(substring: "All advertising materials") + } - // Apache - substringMatcher(licenseFamilyCategory: "AL ", - licenseFamilyName: "Apache") { - // Apache license (ES) - pattern(substring: "Licensed to Elasticsearch under one or more contributor") - // Apache license (ASF) - pattern(substring: "Licensed to the Apache Software Foundation (ASF) under") - // this is the old-school one under some files - pattern(substring: "Licensed under the Apache License, Version 2.0 (the \"License\")") - } + // Apache + substringMatcher(licenseFamilyCategory: "AL ", + licenseFamilyName: "Apache") { + // Apache license (ES) + pattern(substring: "Licensed to Elasticsearch under one or more contributor") + // Apache license (ASF) + pattern(substring: "Licensed to the Apache Software Foundation (ASF) under") + // this is the old-school one under some files + pattern(substring: "Licensed under the Apache License, Version 2.0 (the \"License\")") + } - // Generated resources - substringMatcher(licenseFamilyCategory: "GEN ", - licenseFamilyName: "Generated") { - // parsers generated by antlr - pattern(substring: "ANTLR GENERATED CODE") - } + // Generated resources + substringMatcher(licenseFamilyCategory: "GEN ", + licenseFamilyName: "Generated") { + // parsers generated by antlr + pattern(substring: "ANTLR GENERATED CODE") + } - // approved categories - approvedLicense(familyName: "Apache") - approvedLicense(familyName: "Generated") + // license types added by the project + for (Map.Entry additional : additionalLicenses.entrySet()) { + String category = additional.getKey().substring(0, 5) + String family = additional.getKey().substring(5) + substringMatcher(licenseFamilyCategory: category, + licenseFamilyName: family) { + pattern(substring: additional.getValue()) + } + } + + // approved categories + for (String licenseFamily : approvedLicenses) { + 
approvedLicense(familyName: licenseFamily) + } } // check the license file for any errors, this should be fast. From 0e6bdd025da322a2b0a35fd25d6bb6fedcbfda96 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 25 Jul 2016 17:17:12 -0700 Subject: [PATCH 60/93] Construct hashmap directly instead of using crazy groovy syntax --- .../elasticsearch/gradle/precommit/LicenseHeadersTask.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy index cee01f5380a..820fbe34335 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy @@ -53,7 +53,7 @@ public class LicenseHeadersTask extends AntTask { * Additional license families that may be found. The key is the license category name (5 characters), * followed by the family name and the value list of patterns to search for. 
*/ - protected Map additionalLicenses = [:] + protected Map additionalLicenses = new HashMap<>() LicenseHeadersTask() { description = "Checks sources for missing, incorrect, or unacceptable license headers" From c2c9b517327a0fed11dc82f6c9a953f36441f091 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 25 Jul 2016 17:22:08 -0700 Subject: [PATCH 61/93] Add javadocs for adding additional license types to the license headers check --- .../gradle/precommit/LicenseHeadersTask.groovy | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy index 820fbe34335..72853d0ae82 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy @@ -64,6 +64,15 @@ public class LicenseHeadersTask extends AntTask { } } + /** + * Add a new license type. + * + * The license may be added to the {@link #approvedLicenses} using the {@code familyName}. 
+ * + * @param categoryName A 5-character string identifier for the license + * @param familyName An expanded string name for the license + * @param pattern A pattern to search for, which if found, indicates a file contains the license + */ public void additionalLicense(String categoryName, String familyName, String pattern) { if (categoryName.length() != 5) { throw new IllegalArgumentException("License category name must be exactly 5 characters, got ${categoryName}"); From 7bf6676d21ebec537145cfe962814a7808e27835 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 25 Jul 2016 17:30:15 -0700 Subject: [PATCH 62/93] Use string concat instead of gstring --- .../elasticsearch/gradle/precommit/LicenseHeadersTask.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy index 72853d0ae82..fccbb43e872 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LicenseHeadersTask.groovy @@ -77,7 +77,7 @@ public class LicenseHeadersTask extends AntTask { if (categoryName.length() != 5) { throw new IllegalArgumentException("License category name must be exactly 5 characters, got ${categoryName}"); } - additionalLicenses.put("${categoryName}${familyName}", pattern); + additionalLicenses.put(categoryName + familyName, pattern); } @Override From d7d3b1a874457c0876804bde6a9371177d30dc96 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Tue, 26 Jul 2016 06:29:04 +0200 Subject: [PATCH 63/93] Remove duplicate dependency declaration for http client (#19580) We disable transitive dependencies in our build plugin for all dependencies except for the group `org.elasticsearch`. 
However, in the reindex plugin we depend on the REST client and declare its dependencies again which is not necessary (and led to problems with conflicting versions in #19281). With this PR we remove the duplicate declaration. --- modules/reindex/build.gradle | 5 - .../licenses/commons-codec-1.10.jar.sha1 | 1 - .../licenses/commons-codec-LICENSE.txt | 202 ------- .../reindex/licenses/commons-codec-NOTICE.txt | 17 - .../licenses/commons-logging-1.1.3.jar.sha1 | 1 - .../licenses/commons-logging-LICENSE.txt | 202 ------- .../licenses/commons-logging-NOTICE.txt | 5 - .../licenses/httpclient-4.5.2.jar.sha1 | 1 - .../reindex/licenses/httpclient-LICENSE.txt | 558 ------------------ .../reindex/licenses/httpclient-NOTICE.txt | 5 - .../reindex/licenses/httpcore-4.4.5.jar.sha1 | 1 - modules/reindex/licenses/httpcore-LICENSE.txt | 241 -------- modules/reindex/licenses/httpcore-NOTICE.txt | 8 - 13 files changed, 1247 deletions(-) delete mode 100644 modules/reindex/licenses/commons-codec-1.10.jar.sha1 delete mode 100644 modules/reindex/licenses/commons-codec-LICENSE.txt delete mode 100644 modules/reindex/licenses/commons-codec-NOTICE.txt delete mode 100644 modules/reindex/licenses/commons-logging-1.1.3.jar.sha1 delete mode 100644 modules/reindex/licenses/commons-logging-LICENSE.txt delete mode 100644 modules/reindex/licenses/commons-logging-NOTICE.txt delete mode 100644 modules/reindex/licenses/httpclient-4.5.2.jar.sha1 delete mode 100644 modules/reindex/licenses/httpclient-LICENSE.txt delete mode 100644 modules/reindex/licenses/httpclient-NOTICE.txt delete mode 100644 modules/reindex/licenses/httpcore-4.4.5.jar.sha1 delete mode 100644 modules/reindex/licenses/httpcore-LICENSE.txt delete mode 100644 modules/reindex/licenses/httpcore-NOTICE.txt diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index e159b3af8c5..fa4c44d9224 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -37,11 +37,6 @@ run { dependencies { compile 
"org.elasticsearch.client:rest:${version}" - // dependencies of the rest client - compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" - compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" - compile "commons-codec:commons-codec:${versions.commonscodec}" - compile "commons-logging:commons-logging:${versions.commonslogging}" // for http - testing reindex from remote testCompile project(path: ':modules:transport-netty3', configuration: 'runtime') testCompile project(path: ':modules:transport-netty4', configuration: 'runtime') diff --git a/modules/reindex/licenses/commons-codec-1.10.jar.sha1 b/modules/reindex/licenses/commons-codec-1.10.jar.sha1 deleted file mode 100644 index 3fe8682a1b0..00000000000 --- a/modules/reindex/licenses/commons-codec-1.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4b95f4897fa13f2cd904aee711aeafc0c5295cd8 \ No newline at end of file diff --git a/modules/reindex/licenses/commons-codec-LICENSE.txt b/modules/reindex/licenses/commons-codec-LICENSE.txt deleted file mode 100644 index d6456956733..00000000000 --- a/modules/reindex/licenses/commons-codec-LICENSE.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/modules/reindex/licenses/commons-codec-NOTICE.txt b/modules/reindex/licenses/commons-codec-NOTICE.txt deleted file mode 100644 index 56916449bbe..00000000000 --- a/modules/reindex/licenses/commons-codec-NOTICE.txt +++ /dev/null @@ -1,17 +0,0 @@ -Apache Commons Codec -Copyright 2002-2015 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). - -src/test/org/apache/commons/codec/language/DoubleMetaphoneTest.java -contains test data from http://aspell.net/test/orig/batch0.tab. -Copyright (C) 2002 Kevin Atkinson (kevina@gnu.org) - -=============================================================================== - -The content of package org.apache.commons.codec.language.bm has been translated -from the original php source code available at http://stevemorse.org/phoneticinfo.htm -with permission from the original authors. -Original source copyright: -Copyright (c) 2008 Alexander Beider & Stephen P. Morse. 
diff --git a/modules/reindex/licenses/commons-logging-1.1.3.jar.sha1 b/modules/reindex/licenses/commons-logging-1.1.3.jar.sha1 deleted file mode 100644 index c8756c43832..00000000000 --- a/modules/reindex/licenses/commons-logging-1.1.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f diff --git a/modules/reindex/licenses/commons-logging-LICENSE.txt b/modules/reindex/licenses/commons-logging-LICENSE.txt deleted file mode 100644 index 57bc88a15a0..00000000000 --- a/modules/reindex/licenses/commons-logging-LICENSE.txt +++ /dev/null @@ -1,202 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - diff --git a/modules/reindex/licenses/commons-logging-NOTICE.txt b/modules/reindex/licenses/commons-logging-NOTICE.txt deleted file mode 100644 index 72eb32a9024..00000000000 --- a/modules/reindex/licenses/commons-logging-NOTICE.txt +++ /dev/null @@ -1,5 +0,0 @@ -Apache Commons CLI -Copyright 2001-2009 The Apache Software Foundation - -This product includes software developed by -The Apache Software Foundation (http://www.apache.org/). diff --git a/modules/reindex/licenses/httpclient-4.5.2.jar.sha1 b/modules/reindex/licenses/httpclient-4.5.2.jar.sha1 deleted file mode 100644 index 6937112a09f..00000000000 --- a/modules/reindex/licenses/httpclient-4.5.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -733db77aa8d9b2d68015189df76ab06304406e50 \ No newline at end of file diff --git a/modules/reindex/licenses/httpclient-LICENSE.txt b/modules/reindex/licenses/httpclient-LICENSE.txt deleted file mode 100644 index 32f01eda18f..00000000000 --- a/modules/reindex/licenses/httpclient-LICENSE.txt +++ /dev/null @@ -1,558 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. 
- - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - -========================================================================= - -This project includes Public Suffix List copied from - -licensed under the terms of the Mozilla Public License, v. 2.0 - -Full license text: - -Mozilla Public License Version 2.0 -================================== - -1. Definitions --------------- - -1.1. "Contributor" - means each individual or legal entity that creates, contributes to - the creation of, or owns Covered Software. - -1.2. "Contributor Version" - means the combination of the Contributions of others (if any) used - by a Contributor and that particular Contributor's Contribution. - -1.3. "Contribution" - means Covered Software of a particular Contributor. - -1.4. "Covered Software" - means Source Code Form to which the initial Contributor has attached - the notice in Exhibit A, the Executable Form of such Source Code - Form, and Modifications of such Source Code Form, in each case - including portions thereof. - -1.5. "Incompatible With Secondary Licenses" - means - - (a) that the initial Contributor has attached the notice described - in Exhibit B to the Covered Software; or - - (b) that the Covered Software was made available under the terms of - version 1.1 or earlier of the License, but not also under the - terms of a Secondary License. - -1.6. "Executable Form" - means any form of the work other than Source Code Form. - -1.7. "Larger Work" - means a work that combines Covered Software with other material, in - a separate file or files, that is not Covered Software. - -1.8. "License" - means this document. - -1.9. "Licensable" - means having the right to grant, to the maximum extent possible, - whether at the time of the initial grant or subsequently, any and - all of the rights conveyed by this License. - -1.10. 
"Modifications" - means any of the following: - - (a) any file in Source Code Form that results from an addition to, - deletion from, or modification of the contents of Covered - Software; or - - (b) any new file in Source Code Form that contains any Covered - Software. - -1.11. "Patent Claims" of a Contributor - means any patent claim(s), including without limitation, method, - process, and apparatus claims, in any patent Licensable by such - Contributor that would be infringed, but for the grant of the - License, by the making, using, selling, offering for sale, having - made, import, or transfer of either its Contributions or its - Contributor Version. - -1.12. "Secondary License" - means either the GNU General Public License, Version 2.0, the GNU - Lesser General Public License, Version 2.1, the GNU Affero General - Public License, Version 3.0, or any later versions of those - licenses. - -1.13. "Source Code Form" - means the form of the work preferred for making modifications. - -1.14. "You" (or "Your") - means an individual or a legal entity exercising rights under this - License. For legal entities, "You" includes any entity that - controls, is controlled by, or is under common control with You. For - purposes of this definition, "control" means (a) the power, direct - or indirect, to cause the direction or management of such entity, - whether by contract or otherwise, or (b) ownership of more than - fifty percent (50%) of the outstanding shares or beneficial - ownership of such entity. - -2. License Grants and Conditions --------------------------------- - -2.1. 
Grants - -Each Contributor hereby grants You a world-wide, royalty-free, -non-exclusive license: - -(a) under intellectual property rights (other than patent or trademark) - Licensable by such Contributor to use, reproduce, make available, - modify, display, perform, distribute, and otherwise exploit its - Contributions, either on an unmodified basis, with Modifications, or - as part of a Larger Work; and - -(b) under Patent Claims of such Contributor to make, use, sell, offer - for sale, have made, import, and otherwise transfer either its - Contributions or its Contributor Version. - -2.2. Effective Date - -The licenses granted in Section 2.1 with respect to any Contribution -become effective for each Contribution on the date the Contributor first -distributes such Contribution. - -2.3. Limitations on Grant Scope - -The licenses granted in this Section 2 are the only rights granted under -this License. No additional rights or licenses will be implied from the -distribution or licensing of Covered Software under this License. -Notwithstanding Section 2.1(b) above, no patent license is granted by a -Contributor: - -(a) for any code that a Contributor has removed from Covered Software; - or - -(b) for infringements caused by: (i) Your and any other third party's - modifications of Covered Software, or (ii) the combination of its - Contributions with other software (except as part of its Contributor - Version); or - -(c) under Patent Claims infringed by Covered Software in the absence of - its Contributions. - -This License does not grant any rights in the trademarks, service marks, -or logos of any Contributor (except as may be necessary to comply with -the notice requirements in Section 3.4). - -2.4. 
Subsequent Licenses - -No Contributor makes additional grants as a result of Your choice to -distribute the Covered Software under a subsequent version of this -License (see Section 10.2) or under the terms of a Secondary License (if -permitted under the terms of Section 3.3). - -2.5. Representation - -Each Contributor represents that the Contributor believes its -Contributions are its original creation(s) or it has sufficient rights -to grant the rights to its Contributions conveyed by this License. - -2.6. Fair Use - -This License is not intended to limit any rights You have under -applicable copyright doctrines of fair use, fair dealing, or other -equivalents. - -2.7. Conditions - -Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted -in Section 2.1. - -3. Responsibilities -------------------- - -3.1. Distribution of Source Form - -All distribution of Covered Software in Source Code Form, including any -Modifications that You create or to which You contribute, must be under -the terms of this License. You must inform recipients that the Source -Code Form of the Covered Software is governed by the terms of this -License, and how they can obtain a copy of this License. You may not -attempt to alter or restrict the recipients' rights in the Source Code -Form. - -3.2. 
Distribution of Executable Form - -If You distribute Covered Software in Executable Form then: - -(a) such Covered Software must also be made available in Source Code - Form, as described in Section 3.1, and You must inform recipients of - the Executable Form how they can obtain a copy of such Source Code - Form by reasonable means in a timely manner, at a charge no more - than the cost of distribution to the recipient; and - -(b) You may distribute such Executable Form under the terms of this - License, or sublicense it under different terms, provided that the - license for the Executable Form does not attempt to limit or alter - the recipients' rights in the Source Code Form under this License. - -3.3. Distribution of a Larger Work - -You may create and distribute a Larger Work under terms of Your choice, -provided that You also comply with the requirements of this License for -the Covered Software. If the Larger Work is a combination of Covered -Software with a work governed by one or more Secondary Licenses, and the -Covered Software is not Incompatible With Secondary Licenses, this -License permits You to additionally distribute such Covered Software -under the terms of such Secondary License(s), so that the recipient of -the Larger Work may, at their option, further distribute the Covered -Software under the terms of either this License or such Secondary -License(s). - -3.4. Notices - -You may not remove or alter the substance of any license notices -(including copyright notices, patent notices, disclaimers of warranty, -or limitations of liability) contained within the Source Code Form of -the Covered Software, except that You may alter any license notices to -the extent required to remedy known factual inaccuracies. - -3.5. Application of Additional Terms - -You may choose to offer, and to charge a fee for, warranty, support, -indemnity or liability obligations to one or more recipients of Covered -Software. 
However, You may do so only on Your own behalf, and not on -behalf of any Contributor. You must make it absolutely clear that any -such warranty, support, indemnity, or liability obligation is offered by -You alone, and You hereby agree to indemnify every Contributor for any -liability incurred by such Contributor as a result of warranty, support, -indemnity or liability terms You offer. You may include additional -disclaimers of warranty and limitations of liability specific to any -jurisdiction. - -4. Inability to Comply Due to Statute or Regulation ---------------------------------------------------- - -If it is impossible for You to comply with any of the terms of this -License with respect to some or all of the Covered Software due to -statute, judicial order, or regulation then You must: (a) comply with -the terms of this License to the maximum extent possible; and (b) -describe the limitations and the code they affect. Such description must -be placed in a text file included with all distributions of the Covered -Software under this License. Except to the extent prohibited by statute -or regulation, such description must be sufficiently detailed for a -recipient of ordinary skill to be able to understand it. - -5. Termination --------------- - -5.1. The rights granted under this License will terminate automatically -if You fail to comply with any of its terms. However, if You become -compliant, then the rights granted under this License from a particular -Contributor are reinstated (a) provisionally, unless and until such -Contributor explicitly and finally terminates Your grants, and (b) on an -ongoing basis, if such Contributor fails to notify You of the -non-compliance by some reasonable means prior to 60 days after You have -come back into compliance. 
Moreover, Your grants from a particular -Contributor are reinstated on an ongoing basis if such Contributor -notifies You of the non-compliance by some reasonable means, this is the -first time You have received notice of non-compliance with this License -from such Contributor, and You become compliant prior to 30 days after -Your receipt of the notice. - -5.2. If You initiate litigation against any entity by asserting a patent -infringement claim (excluding declaratory judgment actions, -counter-claims, and cross-claims) alleging that a Contributor Version -directly or indirectly infringes any patent, then the rights granted to -You by any and all Contributors for the Covered Software under Section -2.1 of this License shall terminate. - -5.3. In the event of termination under Sections 5.1 or 5.2 above, all -end user license agreements (excluding distributors and resellers) which -have been validly granted by You or Your distributors under this License -prior to termination shall survive termination. - -************************************************************************ -* * -* 6. Disclaimer of Warranty * -* ------------------------- * -* * -* Covered Software is provided under this License on an "as is" * -* basis, without warranty of any kind, either expressed, implied, or * -* statutory, including, without limitation, warranties that the * -* Covered Software is free of defects, merchantable, fit for a * -* particular purpose or non-infringing. The entire risk as to the * -* quality and performance of the Covered Software is with You. * -* Should any Covered Software prove defective in any respect, You * -* (not any Contributor) assume the cost of any necessary servicing, * -* repair, or correction. This disclaimer of warranty constitutes an * -* essential part of this License. No use of any Covered Software is * -* authorized under this License except under this disclaimer. 
* -* * -************************************************************************ - -************************************************************************ -* * -* 7. Limitation of Liability * -* -------------------------- * -* * -* Under no circumstances and under no legal theory, whether tort * -* (including negligence), contract, or otherwise, shall any * -* Contributor, or anyone who distributes Covered Software as * -* permitted above, be liable to You for any direct, indirect, * -* special, incidental, or consequential damages of any character * -* including, without limitation, damages for lost profits, loss of * -* goodwill, work stoppage, computer failure or malfunction, or any * -* and all other commercial damages or losses, even if such party * -* shall have been informed of the possibility of such damages. This * -* limitation of liability shall not apply to liability for death or * -* personal injury resulting from such party's negligence to the * -* extent applicable law prohibits such limitation. Some * -* jurisdictions do not allow the exclusion or limitation of * -* incidental or consequential damages, so this exclusion and * -* limitation may not apply to You. * -* * -************************************************************************ - -8. Litigation -------------- - -Any litigation relating to this License may be brought only in the -courts of a jurisdiction where the defendant maintains its principal -place of business and such litigation shall be governed by laws of that -jurisdiction, without reference to its conflict-of-law provisions. -Nothing in this Section shall prevent a party's ability to bring -cross-claims or counter-claims. - -9. Miscellaneous ----------------- - -This License represents the complete agreement concerning the subject -matter hereof. If any provision of this License is held to be -unenforceable, such provision shall be reformed only to the extent -necessary to make it enforceable. 
Any law or regulation which provides -that the language of a contract shall be construed against the drafter -shall not be used to construe this License against a Contributor. - -10. Versions of the License ---------------------------- - -10.1. New Versions - -Mozilla Foundation is the license steward. Except as provided in Section -10.3, no one other than the license steward has the right to modify or -publish new versions of this License. Each version will be given a -distinguishing version number. - -10.2. Effect of New Versions - -You may distribute the Covered Software under the terms of the version -of the License under which You originally received the Covered Software, -or under the terms of any subsequent version published by the license -steward. - -10.3. Modified Versions - -If you create software not governed by this License, and you want to -create a new license for such software, you may create and use a -modified version of this License if you rename the license and remove -any references to the name of the license steward (except to note that -such modified license differs from this License). - -10.4. Distributing Source Code Form that is Incompatible With Secondary -Licenses - -If You choose to distribute Source Code Form that is Incompatible With -Secondary Licenses under the terms of this version of the License, the -notice described in Exhibit B of this License must be attached. - -Exhibit A - Source Code Form License Notice -------------------------------------------- - - This Source Code Form is subject to the terms of the Mozilla Public - License, v. 2.0. If a copy of the MPL was not distributed with this - file, You can obtain one at http://mozilla.org/MPL/2.0/. - -If it is not possible or desirable to put the notice in a particular -file, then You may include the notice in a location (such as a LICENSE -file in a relevant directory) where a recipient would be likely to look -for such a notice. 
- -You may add additional accurate notices of copyright ownership. - -Exhibit B - "Incompatible With Secondary Licenses" Notice ---------------------------------------------------------- - - This Source Code Form is "Incompatible With Secondary Licenses", as - defined by the Mozilla Public License, v. 2.0. diff --git a/modules/reindex/licenses/httpclient-NOTICE.txt b/modules/reindex/licenses/httpclient-NOTICE.txt deleted file mode 100644 index 4f6058178b2..00000000000 --- a/modules/reindex/licenses/httpclient-NOTICE.txt +++ /dev/null @@ -1,5 +0,0 @@ -Apache HttpComponents Client -Copyright 1999-2015 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). diff --git a/modules/reindex/licenses/httpcore-4.4.5.jar.sha1 b/modules/reindex/licenses/httpcore-4.4.5.jar.sha1 deleted file mode 100644 index 58172660174..00000000000 --- a/modules/reindex/licenses/httpcore-4.4.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e7501a1b34325abb00d17dde96150604a0658b54 \ No newline at end of file diff --git a/modules/reindex/licenses/httpcore-LICENSE.txt b/modules/reindex/licenses/httpcore-LICENSE.txt deleted file mode 100644 index 72819a9f06f..00000000000 --- a/modules/reindex/licenses/httpcore-LICENSE.txt +++ /dev/null @@ -1,241 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - -========================================================================= - -This project contains annotations in the package org.apache.http.annotation -which are derived from JCIP-ANNOTATIONS -Copyright (c) 2005 Brian Goetz and Tim Peierls. -See http://www.jcip.net and the Creative Commons Attribution License -(http://creativecommons.org/licenses/by/2.5) -Full text: http://creativecommons.org/licenses/by/2.5/legalcode - -License - -THE WORK (AS DEFINED BELOW) IS PROVIDED UNDER THE TERMS OF THIS CREATIVE COMMONS PUBLIC LICENSE ("CCPL" OR "LICENSE"). 
THE WORK IS PROTECTED BY COPYRIGHT AND/OR OTHER APPLICABLE LAW. ANY USE OF THE WORK OTHER THAN AS AUTHORIZED UNDER THIS LICENSE OR COPYRIGHT LAW IS PROHIBITED. - -BY EXERCISING ANY RIGHTS TO THE WORK PROVIDED HERE, YOU ACCEPT AND AGREE TO BE BOUND BY THE TERMS OF THIS LICENSE. THE LICENSOR GRANTS YOU THE RIGHTS CONTAINED HERE IN CONSIDERATION OF YOUR ACCEPTANCE OF SUCH TERMS AND CONDITIONS. - -1. Definitions - - "Collective Work" means a work, such as a periodical issue, anthology or encyclopedia, in which the Work in its entirety in unmodified form, along with a number of other contributions, constituting separate and independent works in themselves, are assembled into a collective whole. A work that constitutes a Collective Work will not be considered a Derivative Work (as defined below) for the purposes of this License. - "Derivative Work" means a work based upon the Work or upon the Work and other pre-existing works, such as a translation, musical arrangement, dramatization, fictionalization, motion picture version, sound recording, art reproduction, abridgment, condensation, or any other form in which the Work may be recast, transformed, or adapted, except that a work that constitutes a Collective Work will not be considered a Derivative Work for the purpose of this License. For the avoidance of doubt, where the Work is a musical composition or sound recording, the synchronization of the Work in timed-relation with a moving image ("synching") will be considered a Derivative Work for the purpose of this License. - "Licensor" means the individual or entity that offers the Work under the terms of this License. - "Original Author" means the individual or entity who created the Work. - "Work" means the copyrightable work of authorship offered under the terms of this License. 
- "You" means an individual or entity exercising rights under this License who has not previously violated the terms of this License with respect to the Work, or who has received express permission from the Licensor to exercise rights under this License despite a previous violation. - -2. Fair Use Rights. Nothing in this license is intended to reduce, limit, or restrict any rights arising from fair use, first sale or other limitations on the exclusive rights of the copyright owner under copyright law or other applicable laws. - -3. License Grant. Subject to the terms and conditions of this License, Licensor hereby grants You a worldwide, royalty-free, non-exclusive, perpetual (for the duration of the applicable copyright) license to exercise the rights in the Work as stated below: - - to reproduce the Work, to incorporate the Work into one or more Collective Works, and to reproduce the Work as incorporated in the Collective Works; - to create and reproduce Derivative Works; - to distribute copies or phonorecords of, display publicly, perform publicly, and perform publicly by means of a digital audio transmission the Work including as incorporated in Collective Works; - to distribute copies or phonorecords of, display publicly, perform publicly, and perform publicly by means of a digital audio transmission Derivative Works. - - For the avoidance of doubt, where the work is a musical composition: - Performance Royalties Under Blanket Licenses. Licensor waives the exclusive right to collect, whether individually or via a performance rights society (e.g. ASCAP, BMI, SESAC), royalties for the public performance or public digital performance (e.g. webcast) of the Work. - Mechanical Rights and Statutory Royalties. Licensor waives the exclusive right to collect, whether individually or via a music rights agency or designated agent (e.g. 
Harry Fox Agency), royalties for any phonorecord You create from the Work ("cover version") and distribute, subject to the compulsory license created by 17 USC Section 115 of the US Copyright Act (or the equivalent in other jurisdictions). - Webcasting Rights and Statutory Royalties. For the avoidance of doubt, where the Work is a sound recording, Licensor waives the exclusive right to collect, whether individually or via a performance-rights society (e.g. SoundExchange), royalties for the public digital performance (e.g. webcast) of the Work, subject to the compulsory license created by 17 USC Section 114 of the US Copyright Act (or the equivalent in other jurisdictions). - -The above rights may be exercised in all media and formats whether now known or hereafter devised. The above rights include the right to make such modifications as are technically necessary to exercise the rights in other media and formats. All rights not expressly granted by Licensor are hereby reserved. - -4. Restrictions.The license granted in Section 3 above is expressly made subject to and limited by the following restrictions: - - You may distribute, publicly display, publicly perform, or publicly digitally perform the Work only under the terms of this License, and You must include a copy of, or the Uniform Resource Identifier for, this License with every copy or phonorecord of the Work You distribute, publicly display, publicly perform, or publicly digitally perform. You may not offer or impose any terms on the Work that alter or restrict the terms of this License or the recipients' exercise of the rights granted hereunder. You may not sublicense the Work. You must keep intact all notices that refer to this License and to the disclaimer of warranties. You may not distribute, publicly display, publicly perform, or publicly digitally perform the Work with any technological measures that control access or use of the Work in a manner inconsistent with the terms of this License Agreement. 
The above applies to the Work as incorporated in a Collective Work, but this does not require the Collective Work apart from the Work itself to be made subject to the terms of this License. If You create a Collective Work, upon notice from any Licensor You must, to the extent practicable, remove from the Collective Work any credit as required by clause 4(b), as requested. If You create a Derivative Work, upon notice from any Licensor You must, to the extent practicable, remove from the Derivative Work any credit as required by clause 4(b), as requested. - If you distribute, publicly display, publicly perform, or publicly digitally perform the Work or any Derivative Works or Collective Works, You must keep intact all copyright notices for the Work and provide, reasonable to the medium or means You are utilizing: (i) the name of the Original Author (or pseudonym, if applicable) if supplied, and/or (ii) if the Original Author and/or Licensor designate another party or parties (e.g. a sponsor institute, publishing entity, journal) for attribution in Licensor's copyright notice, terms of service or by other reasonable means, the name of such party or parties; the title of the Work if supplied; to the extent reasonably practicable, the Uniform Resource Identifier, if any, that Licensor specifies to be associated with the Work, unless such URI does not refer to the copyright notice or licensing information for the Work; and in the case of a Derivative Work, a credit identifying the use of the Work in the Derivative Work (e.g., "French translation of the Work by Original Author," or "Screenplay based on original Work by Original Author"). Such credit may be implemented in any reasonable manner; provided, however, that in the case of a Derivative Work or Collective Work, at a minimum such credit will appear where any other comparable authorship credit appears and in a manner at least as prominent as such other comparable authorship credit. - -5. 
Representations, Warranties and Disclaimer - -UNLESS OTHERWISE MUTUALLY AGREED TO BY THE PARTIES IN WRITING, LICENSOR OFFERS THE WORK AS-IS AND MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE WORK, EXPRESS, IMPLIED, STATUTORY OR OTHERWISE, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF TITLE, MERCHANTIBILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, ACCURACY, OR THE PRESENCE OF ABSENCE OF ERRORS, WHETHER OR NOT DISCOVERABLE. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OF IMPLIED WARRANTIES, SO SUCH EXCLUSION MAY NOT APPLY TO YOU. - -6. Limitation on Liability. EXCEPT TO THE EXTENT REQUIRED BY APPLICABLE LAW, IN NO EVENT WILL LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY FOR ANY SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR EXEMPLARY DAMAGES ARISING OUT OF THIS LICENSE OR THE USE OF THE WORK, EVEN IF LICENSOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - -7. Termination - - This License and the rights granted hereunder will terminate automatically upon any breach by You of the terms of this License. Individuals or entities who have received Derivative Works or Collective Works from You under this License, however, will not have their licenses terminated provided such individuals or entities remain in full compliance with those licenses. Sections 1, 2, 5, 6, 7, and 8 will survive any termination of this License. - Subject to the above terms and conditions, the license granted here is perpetual (for the duration of the applicable copyright in the Work). Notwithstanding the above, Licensor reserves the right to release the Work under different license terms or to stop distributing the Work at any time; provided, however that any such election will not serve to withdraw this License (or any other license that has been, or is required to be, granted under the terms of this License), and this License will continue in full force and effect unless terminated as stated above. - -8. 
Miscellaneous - - Each time You distribute or publicly digitally perform the Work or a Collective Work, the Licensor offers to the recipient a license to the Work on the same terms and conditions as the license granted to You under this License. - Each time You distribute or publicly digitally perform a Derivative Work, Licensor offers to the recipient a license to the original Work on the same terms and conditions as the license granted to You under this License. - If any provision of this License is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this License, and without further action by the parties to this agreement, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. - No term or provision of this License shall be deemed waived and no breach consented to unless such waiver or consent shall be in writing and signed by the party to be charged with such waiver or consent. - This License constitutes the entire agreement between the parties with respect to the Work licensed here. There are no understandings, agreements or representations with respect to the Work not specified here. Licensor shall not be bound by any additional provisions that may appear in any communication from You. This License may not be modified without the mutual written agreement of the Licensor and You. diff --git a/modules/reindex/licenses/httpcore-NOTICE.txt b/modules/reindex/licenses/httpcore-NOTICE.txt deleted file mode 100644 index c0be50a505e..00000000000 --- a/modules/reindex/licenses/httpcore-NOTICE.txt +++ /dev/null @@ -1,8 +0,0 @@ -Apache HttpComponents Core -Copyright 2005-2014 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). - -This project contains annotations derived from JCIP-ANNOTATIONS -Copyright (c) 2005 Brian Goetz and Tim Peierls. 
See http://www.jcip.net From 94bc489275d4462a8b2520f6d3ef4f70dc1d6e99 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Tue, 26 Jul 2016 08:27:47 +0200 Subject: [PATCH 64/93] Temporarily increase Netty4RestIT timeout Recently, we experience timeouts on our Windows build slaves for Netty4RestIT. Until we have figured out what's going on, we increase this test suite's timeout temporarily to ensure this timeout does not mask other problems. --- .../java/org/elasticsearch/http/netty4/Netty4RestIT.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4RestIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4RestIT.java index cbd8f7cd1e6..71cbf70abb3 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4RestIT.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4RestIT.java @@ -22,12 +22,16 @@ package org.elasticsearch.http.netty4; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; +import org.apache.lucene.util.TimeUnits; import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; +//TODO: This is a *temporary* workaround to ensure a timeout does not mask other problems +@TimeoutSuite(millis = 30 * TimeUnits.MINUTE) public class Netty4RestIT extends ESClientYamlSuiteTestCase { public Netty4RestIT(@Name("yaml") RestTestCandidate testCandidate) { @@ -39,4 +43,4 @@ public class Netty4RestIT extends ESClientYamlSuiteTestCase { return ESClientYamlSuiteTestCase.createParameters(0, 1); } -} \ No newline at end of file +} From c7c0faa54ddda8134aa23d54d07bed43b24a1817 Mon Sep 17 
00:00:00 2001 From: Martijn van Groningen Date: Fri, 22 Jul 2016 14:54:40 +0200 Subject: [PATCH 65/93] aggs: Changed how `nested` and `reverse_nested` aggs know about their nested depth level. Before the aggregation tree was traversed to figure out what the parent level is, this commit changes that by using `NestedScope` to figure out the nested depth level. The big upsides are that this cleans up `NestedAggregator` (it used a hack to lazily figure out the nested parent filter) and this is also what `nested` query uses and therefore the `nested` query can be included inside `nested` aggregation and work correctly. Closes #11749 Closes #12410 --- .../aggregations/AggregatorFactory.java | 6 +- .../nested/NestedAggregationBuilder.java | 19 ++- .../bucket/nested/NestedAggregator.java | 74 +++------- .../nested/NestedAggregatorFactory.java | 19 ++- .../ReverseNestedAggregationBuilder.java | 41 +++++- .../ReverseNestedAggregatorFactory.java | 42 ++---- .../search/aggregations/bucket/NestedIT.java | 137 +++++++++++++++++- .../aggregations/bucket/ReverseNestedIT.java | 22 ++- 8 files changed, 244 insertions(+), 116 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java index 854838b7441..44ecacd8417 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java @@ -229,6 +229,10 @@ public abstract class AggregatorFactory> { return type.name(); } + public AggregatorFactory getParent() { + return parent; + } + /** * Utility method. 
Given an {@link AggregatorFactory} that creates * {@link Aggregator}s that only know how to collect bucket 0, this @@ -241,4 +245,4 @@ public abstract class AggregatorFactory> { return new MultiBucketAggregatorWrapper(bigArrays, context, parent, factory, first); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregationBuilder.java index dc47cc09578..98c5f02af05 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregationBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregationBuilder.java @@ -25,8 +25,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; +import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.InternalAggregation.Type; import org.elasticsearch.search.aggregations.AggregatorFactory; @@ -80,7 +82,22 @@ public class NestedAggregationBuilder extends AbstractAggregationBuilder doBuild(AggregationContext context, AggregatorFactory parent, Builder subFactoriesBuilder) throws IOException { - return new NestedAggregatorFactory(name, type, path, context, parent, subFactoriesBuilder, metaData); + ObjectMapper childObjectMapper = context.searchContext().getObjectMapper(path); + if (childObjectMapper == null) { + // in case the path has been unmapped: + return new NestedAggregatorFactory(name, 
type, null, null, context, parent, subFactoriesBuilder, metaData); + } + + if (childObjectMapper.nested().isNested() == false) { + throw new AggregationExecutionException("[nested] nested path [" + path + "] is not nested"); + } + try { + ObjectMapper parentObjectMapper = context.searchContext().getQueryShardContext().nestedScope().nextLevel(childObjectMapper); + return new NestedAggregatorFactory(name, type, parentObjectMapper, childObjectMapper, context, parent, subFactoriesBuilder, + metaData); + } finally { + context.searchContext().getQueryShardContext().nestedScope().previousLevel(); + } } @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java index 677951c52c0..7716810a1b3 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java @@ -44,69 +44,40 @@ import java.io.IOException; import java.util.List; import java.util.Map; -/** - * - */ public class NestedAggregator extends SingleBucketAggregator { static final ParseField PATH_FIELD = new ParseField("path"); - private BitSetProducer parentFilter; + private final BitSetProducer parentFilter; private final Query childFilter; - private DocIdSetIterator childDocs; - private BitSet parentDocs; - - public NestedAggregator(String name, AggregatorFactories factories, ObjectMapper objectMapper, AggregationContext aggregationContext, Aggregator parentAggregator, List pipelineAggregators, Map metaData) throws IOException { + public NestedAggregator(String name, AggregatorFactories factories, ObjectMapper parentObjectMapper, ObjectMapper childObjectMapper, + AggregationContext aggregationContext, Aggregator parentAggregator, + List pipelineAggregators, Map metaData) throws IOException { super(name, factories, 
aggregationContext, parentAggregator, pipelineAggregators, metaData); - childFilter = objectMapper.nestedTypeFilter(); + Query parentFilter = parentObjectMapper != null ? parentObjectMapper.nestedTypeFilter() : Queries.newNonNestedFilter(); + this.parentFilter = context.searchContext().bitsetFilterCache().getBitSetProducer(parentFilter); + this.childFilter = childObjectMapper.nestedTypeFilter(); } @Override public LeafBucketCollector getLeafCollector(final LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { - // Reset parentFilter, so we resolve the parentDocs for each new segment being searched - this.parentFilter = null; - final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(ctx); - final IndexSearcher searcher = new IndexSearcher(topLevelContext); + IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(ctx); + IndexSearcher searcher = new IndexSearcher(topLevelContext); searcher.setQueryCache(null); - final Weight weight = searcher.createNormalizedWeight(childFilter, false); + Weight weight = searcher.createNormalizedWeight(childFilter, false); Scorer childDocsScorer = weight.scorer(ctx); - if (childDocsScorer == null) { - childDocs = null; - } else { - childDocs = childDocsScorer.iterator(); - } + final BitSet parentDocs = parentFilter.getBitSet(ctx); + final DocIdSetIterator childDocs = childDocsScorer != null ? 
childDocsScorer.iterator() : null; return new LeafBucketCollectorBase(sub, null) { @Override public void collect(int parentDoc, long bucket) throws IOException { - // here we translate the parent doc to a list of its nested docs, and then call super.collect for evey one of them so they'll be collected - - // if parentDoc is 0 then this means that this parent doesn't have child docs (b/c these appear always before the parent doc), so we can skip: - if (parentDoc == 0 || childDocs == null) { + // if parentDoc is 0 then this means that this parent doesn't have child docs (b/c these appear always before the parent + // doc), so we can skip: + if (parentDoc == 0 || parentDocs == null || childDocs == null) { return; } - if (parentFilter == null) { - // The aggs are instantiated in reverse, first the most inner nested aggs and lastly the top level aggs - // So at the time a nested 'nested' aggs is parsed its closest parent nested aggs hasn't been constructed. - // So the trick is to set at the last moment just before needed and we can use its child filter as the - // parent filter. - - // Additional NOTE: Before this logic was performed in the setNextReader(...) method, but the assumption - // that aggs instances are constructed in reverse doesn't hold when buckets are constructed lazily during - // aggs execution - Query parentFilterNotCached = findClosestNestedPath(parent()); - if (parentFilterNotCached == null) { - parentFilterNotCached = Queries.newNonNestedFilter(); - } - parentFilter = context.searchContext().bitsetFilterCache().getBitSetProducer(parentFilterNotCached); - parentDocs = parentFilter.getBitSet(ctx); - if (parentDocs == null) { - // There are no parentDocs in the segment, so return and set childDocs to null, so we exit early for future invocations. 
- childDocs = null; - return; - } - } final int prevParentDoc = parentDocs.prevSetBit(parentDoc - 1); int childDocId = childDocs.docID(); @@ -123,8 +94,8 @@ public class NestedAggregator extends SingleBucketAggregator { @Override public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException { - return new InternalNested(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal), pipelineAggregators(), - metaData()); + return new InternalNested(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal), + pipelineAggregators(), metaData()); } @Override @@ -132,15 +103,4 @@ public class NestedAggregator extends SingleBucketAggregator { return new InternalNested(name, 0, buildEmptySubAggregations(), pipelineAggregators(), metaData()); } - private static Query findClosestNestedPath(Aggregator parent) { - for (; parent != null; parent = parent.parent()) { - if (parent instanceof NestedAggregator) { - return ((NestedAggregator) parent).childFilter; - } else if (parent instanceof ReverseNestedAggregator) { - return ((ReverseNestedAggregator) parent).getParentFilter(); - } - } - return null; - } - } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorFactory.java index b09e3d4d40b..ef19a186d52 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorFactory.java @@ -36,12 +36,15 @@ import java.util.Map; public class NestedAggregatorFactory extends AggregatorFactory { - private final String path; + private final ObjectMapper parentObjectMapper; + private final ObjectMapper childObjectMapper; - public NestedAggregatorFactory(String name, Type type, String path, AggregationContext context, AggregatorFactory 
parent, - AggregatorFactories.Builder subFactories, Map metaData) throws IOException { + public NestedAggregatorFactory(String name, Type type, ObjectMapper parentObjectMapper, ObjectMapper childObjectMapper, + AggregationContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactories, + Map metaData) throws IOException { super(name, type, context, parent, subFactories, metaData); - this.path = path; + this.parentObjectMapper = parentObjectMapper; + this.childObjectMapper = childObjectMapper; } @Override @@ -50,14 +53,10 @@ public class NestedAggregatorFactory extends AggregatorFactory { @@ -82,7 +88,40 @@ public class ReverseNestedAggregationBuilder extends AbstractAggregationBuilder< @Override protected AggregatorFactory doBuild(AggregationContext context, AggregatorFactory parent, Builder subFactoriesBuilder) throws IOException { - return new ReverseNestedAggregatorFactory(name, type, path, context, parent, subFactoriesBuilder, metaData); + if (findNestedAggregatorFactory(parent) == null) { + throw new SearchParseException(context.searchContext(), + "Reverse nested aggregation [" + name + "] can only be used inside a [nested] aggregation", null); + } + + ObjectMapper parentObjectMapper = null; + if (path != null) { + parentObjectMapper = context.searchContext().getObjectMapper(path); + if (parentObjectMapper == null) { + return new ReverseNestedAggregatorFactory(name, type, true, null, context, parent, subFactoriesBuilder, metaData); + } + if (parentObjectMapper.nested().isNested() == false) { + throw new AggregationExecutionException("[reverse_nested] nested path [" + path + "] is not nested"); + } + } + + NestedScope nestedScope = context.searchContext().getQueryShardContext().nestedScope(); + try { + nestedScope.nextLevel(parentObjectMapper); + return new ReverseNestedAggregatorFactory(name, type, false, parentObjectMapper, context, parent, subFactoriesBuilder, + metaData); + } finally { + nestedScope.previousLevel(); + } + } + + private 
static NestedAggregatorFactory findNestedAggregatorFactory(AggregatorFactory parent) { + if (parent == null) { + return null; + } else if (parent instanceof NestedAggregatorFactory) { + return (NestedAggregatorFactory) parent; + } else { + return findNestedAggregatorFactory(parent.getParent()); + } } @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorFactory.java index b8be6479436..d626c1b5441 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorFactory.java @@ -37,46 +37,26 @@ import java.util.Map; public class ReverseNestedAggregatorFactory extends AggregatorFactory { - private final String path; + private final boolean unmapped; + private final ObjectMapper parentObjectMapper; - public ReverseNestedAggregatorFactory(String name, Type type, String path, AggregationContext context, AggregatorFactory parent, - AggregatorFactories.Builder subFactories, Map metaData) throws IOException { + public ReverseNestedAggregatorFactory(String name, Type type, boolean unmapped, ObjectMapper parentObjectMapper, + AggregationContext context, AggregatorFactory parent, + AggregatorFactories.Builder subFactories, + Map metaData) throws IOException { super(name, type, context, parent, subFactories, metaData); - this.path = path; + this.unmapped = unmapped; + this.parentObjectMapper = parentObjectMapper; } @Override public Aggregator createInternal(Aggregator parent, boolean collectsFromSingleBucket, List pipelineAggregators, Map metaData) throws IOException { - // Early validation - NestedAggregator closestNestedAggregator = findClosestNestedAggregator(parent); - if (closestNestedAggregator == null) { - throw new 
SearchParseException(context.searchContext(), - "Reverse nested aggregation [" + name + "] can only be used inside a [nested] aggregation", null); - } - - final ObjectMapper objectMapper; - if (path != null) { - objectMapper = context.searchContext().getObjectMapper(path); - if (objectMapper == null) { - return new Unmapped(name, context, parent, pipelineAggregators, metaData); - } - if (!objectMapper.nested().isNested()) { - throw new AggregationExecutionException("[reverse_nested] nested path [" + path + "] is not nested"); - } + if (unmapped) { + return new Unmapped(name, context, parent, pipelineAggregators, metaData); } else { - objectMapper = null; + return new ReverseNestedAggregator(name, factories, parentObjectMapper, context, parent, pipelineAggregators, metaData); } - return new ReverseNestedAggregator(name, factories, objectMapper, context, parent, pipelineAggregators, metaData); - } - - private static NestedAggregator findClosestNestedAggregator(Aggregator parent) { - for (; parent != null; parent = parent.parent()) { - if (parent instanceof NestedAggregator) { - return (NestedAggregator) parent; - } - } - return null; } private static final class Unmapped extends NonCollectingAggregator { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java index b3241fc608c..c8b7aa6ad55 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.search.aggregations.bucket; +import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; @@ -44,6 +45,7 @@ import static 
org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.index.query.QueryBuilders.nestedQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; @@ -62,15 +64,12 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.sameInstance; import static org.hamcrest.core.IsNull.notNullValue; -/** - * - */ @ESIntegTestCase.SuiteScopeTestCase public class NestedIT extends ESIntegTestCase { - static int numParents; - static int[] numChildren; - static SubAggCollectionMode aggCollectionMode; + private static int numParents; + private static int[] numChildren; + private static SubAggCollectionMode aggCollectionMode; @Override public void setupSuiteScopeCluster() throws Exception { @@ -245,7 +244,7 @@ public class NestedIT extends ESIntegTestCase { assertThat(nested, notNullValue()); assertThat(nested.getName(), equalTo("nested")); assertThat(nested.getDocCount(), equalTo(docCount)); - assertThat((long) nested.getProperty("_count"), equalTo(docCount)); + assertThat(nested.getProperty("_count"), equalTo(docCount)); assertThat(nested.getAggregations().asList().isEmpty(), is(false)); LongTerms values = nested.getAggregations().get("values"); @@ -263,7 +262,7 @@ public class NestedIT extends ESIntegTestCase { assertEquals(counts[i], bucket.getDocCount()); } } - assertThat((LongTerms) nested.getProperty("values"), sameInstance(values)); + assertThat(nested.getProperty("values"), sameInstance(values)); } public void testNestedAsSubAggregation() throws Exception { @@ -544,4 +543,126 @@ public class NestedIT 
extends ESIntegTestCase { assertThat(propertyId.getBucketByKey("2").getDocCount(), equalTo(1L)); assertThat(propertyId.getBucketByKey("3").getDocCount(), equalTo(1L)); } + + public void testFilterAggInsideNestedAgg() throws Exception { + assertAcked(prepareCreate("classes") + .addMapping("class", jsonBuilder().startObject().startObject("class").startObject("properties") + .startObject("name").field("type", "text").endObject() + .startObject("methods") + .field("type", "nested") + .startObject("properties") + .startObject("name").field("type", "text").endObject() + .startObject("return_type").field("type", "keyword").endObject() + .startObject("parameters") + .field("type", "nested") + .startObject("properties") + .startObject("name").field("type", "text").endObject() + .startObject("type").field("type", "keyword").endObject() + .endObject() + .endObject() + .endObject() + .endObject().endObject().endObject().endObject())); + + client().prepareIndex("classes", "class", "1").setSource(jsonBuilder().startObject() + .field("name", "QueryBuilder") + .startArray("methods") + .startObject() + .field("name", "toQuery") + .field("return_type", "Query") + .startArray("parameters") + .startObject() + .field("name", "context") + .field("type", "QueryShardContext") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "queryName") + .field("return_type", "QueryBuilder") + .startArray("parameters") + .startObject() + .field("name", "queryName") + .field("type", "String") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "boost") + .field("return_type", "QueryBuilder") + .startArray("parameters") + .startObject() + .field("name", "boost") + .field("type", "float") + .endObject() + .endArray() + .endObject() + .endArray() + .endObject()).get(); + client().prepareIndex("classes", "class", "2").setSource(jsonBuilder().startObject() + .field("name", "Document") + .startArray("methods") + .startObject() + .field("name", "add") + 
.field("return_type", "void") + .startArray("parameters") + .startObject() + .field("name", "field") + .field("type", "IndexableField") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "removeField") + .field("return_type", "void") + .startArray("parameters") + .startObject() + .field("name", "name") + .field("type", "String") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "removeFields") + .field("return_type", "void") + .startArray("parameters") + .startObject() + .field("name", "name") + .field("type", "String") + .endObject() + .endArray() + .endObject() + .endArray() + .endObject()).get(); + refresh(); + + SearchResponse response = client().prepareSearch("classes").addAggregation(nested("to_method", "methods") + .subAggregation(filter("num_string_params", + nestedQuery("methods.parameters", termQuery("methods.parameters.type", "String"), ScoreMode.None))) + ).get(); + Nested toMethods = response.getAggregations().get("to_method"); + Filter numStringParams = toMethods.getAggregations().get("num_string_params"); + assertThat(numStringParams.getDocCount(), equalTo(3L)); + + response = client().prepareSearch("classes").addAggregation(nested("to_method", "methods") + .subAggregation(terms("return_type").field("methods.return_type").subAggregation( + filter("num_string_params", nestedQuery("methods.parameters", termQuery("methods.parameters.type", "String"), ScoreMode.None)) + ) + )).get(); + toMethods = response.getAggregations().get("to_method"); + Terms terms = toMethods.getAggregations().get("return_type"); + Bucket bucket = terms.getBucketByKey("void"); + assertThat(bucket.getDocCount(), equalTo(3L)); + numStringParams = bucket.getAggregations().get("num_string_params"); + assertThat(numStringParams.getDocCount(), equalTo(2L)); + + bucket = terms.getBucketByKey("QueryBuilder"); + assertThat(bucket.getDocCount(), equalTo(2L)); + numStringParams = bucket.getAggregations().get("num_string_params"); + 
assertThat(numStringParams.getDocCount(), equalTo(1L)); + + bucket = terms.getBucketByKey("Query"); + assertThat(bucket.getDocCount(), equalTo(1L)); + numStringParams = bucket.getAggregations().get("num_string_params"); + assertThat(numStringParams.getDocCount(), equalTo(0L)); + } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java index 1d74c03048a..db7c301883a 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java @@ -55,9 +55,6 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.sameInstance; import static org.hamcrest.core.IsNull.notNullValue; -/** - * - */ @ESIntegTestCase.SuiteScopeTestCase public class ReverseNestedIT extends ESIntegTestCase { @@ -170,9 +167,9 @@ public class ReverseNestedIT extends ESIntegTestCase { assertThat(bucket.getKeyAsString(), equalTo("1")); assertThat(bucket.getDocCount(), equalTo(6L)); ReverseNested reverseNested = bucket.getAggregations().get("nested1_to_field1"); - assertThat((long) reverseNested.getProperty("_count"), equalTo(5L)); + assertThat(reverseNested.getProperty("_count"), equalTo(5L)); Terms tags = reverseNested.getAggregations().get("field1"); - assertThat((Terms) reverseNested.getProperty("field1"), sameInstance(tags)); + assertThat(reverseNested.getProperty("field1"), sameInstance(tags)); List tagsBuckets = new ArrayList<>(tags.getBuckets()); assertThat(tagsBuckets.size(), equalTo(6)); assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("c")); @@ -472,14 +469,25 @@ public class ReverseNestedIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(nested("nested2", "nested1.nested2").subAggregation(reverseNested("incorrect").path("nested3"))) - 
.execute().actionGet(); + .get(); Nested nested = searchResponse.getAggregations().get("nested2"); - assertThat(nested, Matchers.notNullValue()); + assertThat(nested, notNullValue()); assertThat(nested.getName(), equalTo("nested2")); ReverseNested reverseNested = nested.getAggregations().get("incorrect"); assertThat(reverseNested.getDocCount(), is(0L)); + + // Test that parsing the reverse_nested agg doesn't fail, because the parent nested agg is unmapped: + searchResponse = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation(nested("incorrect1", "incorrect1").subAggregation(reverseNested("incorrect2").path("incorrect2"))) + .get(); + + nested = searchResponse.getAggregations().get("incorrect1"); + assertThat(nested, notNullValue()); + assertThat(nested.getName(), equalTo("incorrect1")); + assertThat(nested.getDocCount(), is(0L)); } public void testSameParentDocHavingMultipleBuckets() throws Exception { From b861ec1cc00dcecb1d00a59799a8f56830030471 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 25 Jul 2016 20:00:35 +0200 Subject: [PATCH 66/93] Allow empty json object in request body in `_count` API When the request body is missing, all documents in the target index are counted. As mentioned in #19422, the same should happen when the request body is an empty json object. This is also the behaviour for the `_search` endpoint and the two APIs should behave in the same way. 
--- .../index/query/QueryParseContext.java | 3 --- .../rest-api-spec/test/count/10_basic.yaml | 18 ++++++++++++++++++ 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java index 510a8049630..33382a7ab73 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java @@ -73,9 +73,6 @@ public class QueryParseContext implements ParseFieldMatcherSupplier { } } } - if (queryBuilder == null) { - throw new ParsingException(parser.getTokenLocation(), "Required query is missing"); - } return queryBuilder; } catch (ParsingException e) { throw e; diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/count/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/count/10_basic.yaml index f3eb0a5fae6..f38d2c315eb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/count/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/count/10_basic.yaml @@ -37,6 +37,24 @@ setup: - match: {count : 0} +--- +"count with empty body": +# empty body should default to match_all query + - do: + count: + index: test + type: test + body: { } + + - match: {count : 1} + + - do: + count: + index: test + type: test + + - match: {count : 1} + --- "count body without query element": - do: From 2c12c3e628dfe26c9d752fad902d06cf9b613fdc Mon Sep 17 00:00:00 2001 From: Colin Goodheart-Smithe Date: Mon, 25 Jul 2016 10:19:15 +0100 Subject: [PATCH 67/93] Add _bucket_count option to buckets_path This change adds a new special path to the buckets_path syntax `_bucket_count`. This new option will return the number of buckets for a multi-bucket aggregation, which can then be used in pipeline aggregations. 
Closes #19553 --- .../InternalMultiBucketAggregation.java | 6 ++- .../aggregations/bucket/DateHistogramIT.java | 3 +- .../aggregations/bucket/DateRangeIT.java | 5 ++- .../aggregations/bucket/DoubleTermsIT.java | 1 + .../search/aggregations/bucket/FiltersIT.java | 2 + .../aggregations/bucket/GeoDistanceIT.java | 3 +- .../aggregations/bucket/HistogramIT.java | 22 +++++----- docs/reference/aggregations/pipeline.asciidoc | 41 ++++++++++++++++++- 8 files changed, 64 insertions(+), 19 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java b/core/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java index f1e8b7358c0..a1326aaed11 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java @@ -43,7 +43,7 @@ public abstract class InternalMultiBucketAggregation path) { if (path.isEmpty()) { return this; + } else if (path.get(0).equals("_bucket_count")) { + return getBuckets().size(); } else { List buckets = getBuckets(); Object[] propertyArray = new Object[buckets.size()]; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index 2c3534183e0..ace55c2c616 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -380,6 +380,7 @@ public class DateHistogramIT extends ESIntegTestCase { assertThat(histo.getName(), equalTo("histo")); List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(3)); + assertThat(histo.getProperty("_bucket_count"), equalTo(3)); Object[] propertiesKeys = (Object[]) histo.getProperty("_key"); Object[] propertiesDocCounts = (Object[]) 
histo.getProperty("_count"); Object[] propertiesCounts = (Object[]) histo.getProperty("sum.value"); @@ -600,7 +601,7 @@ public class DateHistogramIT extends ESIntegTestCase { assertThat(histo.getBuckets().size(), equalTo(4)); // TODO: use diamond once JI-9019884 is fixed - List buckets = new ArrayList(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); Histogram.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java index 36613cfa784..1e250681004 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java @@ -137,7 +137,7 @@ public class DateRangeIT extends ESIntegTestCase { assertThat(range.getBuckets().size(), equalTo(3)); // TODO: use diamond once JI-9019884 is fixed - List buckets = new ArrayList(range.getBuckets()); + List buckets = new ArrayList<>(range.getBuckets()); Range.Bucket bucket = buckets.get(0); assertThat((String) bucket.getKey(), equalTo("a long time ago")); @@ -421,6 +421,7 @@ public class DateRangeIT extends ESIntegTestCase { assertThat(range.getName(), equalTo("range")); List buckets = range.getBuckets(); assertThat(buckets.size(), equalTo(3)); + assertThat(range.getProperty("_bucket_count"), equalTo(3)); Object[] propertiesKeys = (Object[]) range.getProperty("_key"); Object[] propertiesDocCounts = (Object[]) range.getProperty("_count"); Object[] propertiesCounts = (Object[]) range.getProperty("sum.value"); @@ -855,7 +856,7 @@ public class DateRangeIT extends ESIntegTestCase { Range dateRange = bucket.getAggregations().get("date_range"); // TODO: use diamond once JI-9019884 is fixed - List buckets = new ArrayList(dateRange.getBuckets()); + List buckets = new ArrayList<>(dateRange.getBuckets()); assertThat(dateRange, 
Matchers.notNullValue()); assertThat(dateRange.getName(), equalTo("date_range")); assertThat(buckets.size(), is(1)); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java index 6bcb07ef9d8..ed43ba8bd63 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java @@ -426,6 +426,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase { assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(5)); + assertThat(terms.getProperty("_bucket_count"), equalTo(5)); Object[] propertiesKeys = (Object[]) terms.getProperty("_key"); Object[] propertiesDocCounts = (Object[]) terms.getProperty("_count"); Object[] propertiesCounts = (Object[]) terms.getProperty("sum.value"); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java index 592861ccce2..619bba7da12 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java @@ -172,6 +172,7 @@ public class FiltersIT extends ESIntegTestCase { assertThat(filters.getName(), equalTo("tags")); assertThat(filters.getBuckets().size(), equalTo(2)); + assertThat(filters.getProperty("_bucket_count"), equalTo(2)); Object[] propertiesKeys = (Object[]) filters.getProperty("_key"); Object[] propertiesDocCounts = (Object[]) filters.getProperty("_count"); Object[] propertiesCounts = (Object[]) filters.getProperty("avg_value.value"); @@ -426,6 +427,7 @@ public class FiltersIT extends ESIntegTestCase { assertThat(filters.getName(), equalTo("tags")); assertThat(filters.getBuckets().size(), equalTo(3)); + 
assertThat(filters.getProperty("_bucket_count"), equalTo(3)); Object[] propertiesKeys = (Object[]) filters.getProperty("_key"); Object[] propertiesDocCounts = (Object[]) filters.getProperty("_count"); Object[] propertiesCounts = (Object[]) filters.getProperty("avg_value.value"); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java index a27b193f435..a69513af77d 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java @@ -349,6 +349,7 @@ public class GeoDistanceIT extends ESIntegTestCase { assertThat(geoDist.getName(), equalTo("amsterdam_rings")); List buckets = geoDist.getBuckets(); assertThat(geoDist.getBuckets().size(), equalTo(3)); + assertThat(geoDist.getProperty("_bucket_count"), equalTo(3)); Object[] propertiesKeys = (Object[]) geoDist.getProperty("_key"); Object[] propertiesDocCounts = (Object[]) geoDist.getProperty("_count"); Object[] propertiesCities = (Object[]) geoDist.getProperty("cities"); @@ -429,7 +430,7 @@ public class GeoDistanceIT extends ESIntegTestCase { Range geoDistance = bucket.getAggregations().get("geo_dist"); // TODO: use diamond once JI-9019884 is fixed - List buckets = new ArrayList(geoDistance.getBuckets()); + List buckets = new ArrayList<>(geoDistance.getBuckets()); assertThat(geoDistance, Matchers.notNullValue()); assertThat(geoDistance.getName(), equalTo("geo_dist")); assertThat(buckets.size(), is(1)); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java index dc439318978..8df4a8ca608 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java +++ 
b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; @@ -253,7 +252,7 @@ public class HistogramIT extends ESIntegTestCase { assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); // TODO: use diamond once JI-9019884 is fixed - List buckets = new ArrayList(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); @@ -276,7 +275,7 @@ public class HistogramIT extends ESIntegTestCase { assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); // TODO: use diamond once JI-9019884 is fixed - List buckets = new ArrayList(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(numValueBuckets - i - 1); assertThat(bucket, notNullValue()); @@ -300,7 +299,7 @@ public class HistogramIT extends ESIntegTestCase { LongHashSet buckets = new LongHashSet(); // TODO: use diamond once JI-9019884 is fixed - List histoBuckets = new ArrayList(histo.getBuckets()); + List histoBuckets = new ArrayList<>(histo.getBuckets()); long previousCount = Long.MIN_VALUE; for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = histoBuckets.get(i); @@ -329,7 +328,7 @@ public class HistogramIT extends ESIntegTestCase { LongHashSet buckets = new LongHashSet(); // TODO: use diamond once JI-9019884 is fixed - List 
histoBuckets = new ArrayList(histo.getBuckets()); + List histoBuckets = new ArrayList<>(histo.getBuckets()); long previousCount = Long.MAX_VALUE; for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = histoBuckets.get(i); @@ -356,12 +355,13 @@ public class HistogramIT extends ESIntegTestCase { assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); + assertThat(histo.getProperty("_bucket_count"), equalTo(numValueBuckets)); Object[] propertiesKeys = (Object[]) histo.getProperty("_key"); Object[] propertiesDocCounts = (Object[]) histo.getProperty("_count"); Object[] propertiesCounts = (Object[]) histo.getProperty("sum.value"); // TODO: use diamond once JI-9019884 is fixed - List buckets = new ArrayList(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); @@ -404,7 +404,7 @@ public class HistogramIT extends ESIntegTestCase { LongHashSet visited = new LongHashSet(); double previousSum = Double.NEGATIVE_INFINITY; // TODO: use diamond once JI-9019884 is fixed - List buckets = new ArrayList(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); @@ -448,7 +448,7 @@ public class HistogramIT extends ESIntegTestCase { LongHashSet visited = new LongHashSet(); double previousSum = Double.POSITIVE_INFINITY; // TODO: use diamond once JI-9019884 is fixed - List buckets = new ArrayList(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); @@ -492,7 +492,7 @@ public class HistogramIT extends ESIntegTestCase { LongHashSet visited = new LongHashSet(); double 
previousSum = Double.POSITIVE_INFINITY; // TODO: use diamond once JI-9019884 is fixed - List buckets = new ArrayList(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); @@ -538,7 +538,7 @@ public class HistogramIT extends ESIntegTestCase { LongHashSet visited = new LongHashSet(); double prevMax = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY; // TODO: use diamond once JI-9019884 is fixed - List buckets = new ArrayList(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); @@ -625,7 +625,7 @@ public class HistogramIT extends ESIntegTestCase { assertThat(histo.getBuckets().size(), equalTo(numValuesBuckets)); // TODO: use diamond once JI-9019884 is fixed - List buckets = new ArrayList(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); for (int i = 0; i < numValuesBuckets; ++i) { Histogram.Bucket bucket = buckets.get(numValuesBuckets - i - 1); assertThat(bucket, notNullValue()); diff --git a/docs/reference/aggregations/pipeline.asciidoc b/docs/reference/aggregations/pipeline.asciidoc index 7ed01d9dc7f..4382596159c 100644 --- a/docs/reference/aggregations/pipeline.asciidoc +++ b/docs/reference/aggregations/pipeline.asciidoc @@ -107,8 +107,7 @@ a metric embedded inside a sibling aggregation: === Special Paths Instead of pathing to a metric, `buckets_path` can use a special `"_count"` path. This instructs -the pipeline aggregation to use the document count as it's input. For example, a moving average can be calculated on the document -count of each bucket, instead of a specific metric: +the pipeline aggregation to use the document count as it's input. 
For example, a moving average can be calculated on the document count of each bucket, instead of a specific metric: [source,js] -------------------------------------------------- @@ -128,6 +127,44 @@ count of each bucket, instead of a specific metric: -------------------------------------------------- <1> By using `_count` instead of a metric name, we can calculate the moving average of document counts in the histogram +The `buckets_path` can also use `"_bucket_count"` and path to a multi-bucket aggregation to use the number of buckets +returned by that aggregation in the pipeline aggregation instead of a metric. for example a `bucket_selector` can be +used here to filter out buckets which contain no buckets for an inner terms aggregation: + +[source,js] +-------------------------------------------------- +{ + "size": 0, + "aggs": { + "histo": { + "date_histogram": { + "field": "date", + "interval": "day" + }, + "aggs": { + "categories": { + "terms": { + "field": "category" + } + }, + "min_bucket_selector": { + "bucket_selector": { + "buckets_path": { + "count": "categories._bucket_count" + }, + "script": { + "inline": "count != 0" + } + } + } + } + } + } +} +-------------------------------------------------- +<1> By using `_bucket_count` instead of a metric name, we can filter out `histo` buckets where they contain no buckets +for the `categories` aggregation + [[dots-in-agg-names]] [float] === Dealing with dots in agg names From c33f85bc37bbfee87158738f538d380c0cc31d13 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Tue, 26 Jul 2016 11:01:22 +0200 Subject: [PATCH 68/93] Add client benchmark With this commit we add a benchmark for the new REST client and the existing transport client. 
Closes #19281 --- client/benchmark/README.md | 35 ++++ client/benchmark/build.gradle | 48 +++++ .../client/benchmark/AbstractBenchmark.java | 88 +++++++++ .../client/benchmark/BenchmarkRunner.java | 84 +++++++++ .../client/benchmark/BenchmarkTask.java | 29 +++ .../client/benchmark/metrics/Metrics.java | 45 +++++ .../benchmark/metrics/MetricsCalculator.java | 80 ++++++++ .../client/benchmark/metrics/Sample.java | 54 ++++++ .../benchmark/metrics/SampleRecorder.java | 51 +++++ .../benchmark/ops/bulk/BulkBenchmarkTask.java | 174 ++++++++++++++++++ .../ops/bulk/BulkRequestExecutor.java | 25 +++ .../ops/search/SearchBenchmarkTask.java | 86 +++++++++ .../ops/search/SearchRequestExecutor.java | 23 +++ .../benchmark/rest/RestClientBenchmark.java | 108 +++++++++++ .../transport/TransportClientBenchmark.java | 117 ++++++++++++ .../src/main/resources/log4j.properties | 9 + settings.gradle | 1 + 17 files changed, 1057 insertions(+) create mode 100644 client/benchmark/README.md create mode 100644 client/benchmark/build.gradle create mode 100644 client/benchmark/src/main/java/org/elasticsearch/client/benchmark/AbstractBenchmark.java create mode 100644 client/benchmark/src/main/java/org/elasticsearch/client/benchmark/BenchmarkRunner.java create mode 100644 client/benchmark/src/main/java/org/elasticsearch/client/benchmark/BenchmarkTask.java create mode 100644 client/benchmark/src/main/java/org/elasticsearch/client/benchmark/metrics/Metrics.java create mode 100644 client/benchmark/src/main/java/org/elasticsearch/client/benchmark/metrics/MetricsCalculator.java create mode 100644 client/benchmark/src/main/java/org/elasticsearch/client/benchmark/metrics/Sample.java create mode 100644 client/benchmark/src/main/java/org/elasticsearch/client/benchmark/metrics/SampleRecorder.java create mode 100644 client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java create mode 100644 
client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkRequestExecutor.java create mode 100644 client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/search/SearchBenchmarkTask.java create mode 100644 client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/search/SearchRequestExecutor.java create mode 100644 client/benchmark/src/main/java/org/elasticsearch/client/benchmark/rest/RestClientBenchmark.java create mode 100644 client/benchmark/src/main/java/org/elasticsearch/client/benchmark/transport/TransportClientBenchmark.java create mode 100644 client/benchmark/src/main/resources/log4j.properties diff --git a/client/benchmark/README.md b/client/benchmark/README.md new file mode 100644 index 00000000000..9ff2a327b33 --- /dev/null +++ b/client/benchmark/README.md @@ -0,0 +1,35 @@ +Steps to execute the benchmark: + +1. Start Elasticsearch on the target host (ideally *not* on the same machine) +2. Create an empty index with the mapping you want to benchmark +3. Start either the RestClientBenchmark class or the TransportClientBenchmark +4. Delete the index +5. Repeat steps 2. - 4. for multiple iterations. The first iterations are intended as warmup for Elasticsearch itself. Always start the same benchmark in step 3! +6. After the benchmark: Shutdown Elasticsearch and delete the data directory
+ +Example benchmark: + +* Download benchmark data from http://benchmarks.elastic.co/corpora/geonames/documents.json.bz2 and decompress +* Use the mapping file https://github.com/elastic/rally-tracks/blob/master/geonames/mappings.json to create the index + +Example command line parameter list: + +``` +192.168.2.2 /home/your_user_name/.rally/benchmarks/data/geonames/documents.json geonames type 8647880 5000 "{ \"query\": { \"match_phrase\": { \"name\": \"Sankt Georgen\" } } }\"" +``` + +The parameters are in order: + +* Benchmark target host IP (the host where Elasticsearch is running) +* full path to the file that should be bulk indexed +* name of the index +* name of the (sole) type in the index +* number of documents in the file +* bulk size +* a search request body (remember to escape double quotes). The `TransportClientBenchmark` uses `QueryBuilders.wrapperQuery()` internally which automatically adds a root key `query`, so it must not be present in the command line parameter. + +You should also define a few GC-related settings `-Xms4096M -Xmx4096M -XX:+UseConcMarkSweepGC -verbose:gc -XX:+PrintGCDetails` and keep an eye on GC activity. You can also define `-XX:+PrintCompilation` to see JIT activity. + + diff --git a/client/benchmark/build.gradle b/client/benchmark/build.gradle new file mode 100644 index 00000000000..ebaff611627 --- /dev/null +++ b/client/benchmark/build.gradle @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +apply plugin: 'elasticsearch.build' + +group = 'org.elasticsearch.client' + +// never try to invoke tests on the benchmark project - there aren't any +check.dependsOn.remove(test) +// explicitly override the test task too in case somebody invokes 'gradle test' so it won't trip +task test(type: Test, overwrite: true) + +dependencies { + compile 'org.apache.commons:commons-math3:3.2' + + compile("org.elasticsearch.client:rest:${version}") + // for transport client + compile("org.elasticsearch:elasticsearch:${version}") + compile("org.elasticsearch.client:transport:${version}") + compile project(path: ':modules:transport-netty3', configuration: 'runtime') + compile project(path: ':modules:transport-netty4', configuration: 'runtime') + compile project(path: ':modules:reindex', configuration: 'runtime') + compile project(path: ':modules:lang-mustache', configuration: 'runtime') + compile project(path: ':modules:percolator', configuration: 'runtime') +} + +// No licenses for our benchmark deps (we don't ship benchmarks) +dependencyLicenses.enabled = false + +extraArchive { + javadoc = false +} diff --git a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/AbstractBenchmark.java b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/AbstractBenchmark.java new file mode 100644 index 00000000000..dc018f03dc4 --- /dev/null +++ b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/AbstractBenchmark.java @@ -0,0 +1,88 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license 
agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.benchmark; + +import org.elasticsearch.client.benchmark.ops.bulk.BulkBenchmarkTask; +import org.elasticsearch.client.benchmark.ops.bulk.BulkRequestExecutor; +import org.elasticsearch.client.benchmark.ops.search.SearchBenchmarkTask; +import org.elasticsearch.client.benchmark.ops.search.SearchRequestExecutor; +import org.elasticsearch.common.SuppressForbidden; + +import java.io.Closeable; + +public abstract class AbstractBenchmark { + private static final int SEARCH_BENCHMARK_ITERATIONS = 10_000; + + protected abstract T client(String benchmarkTargetHost) throws Exception; + + protected abstract BulkRequestExecutor bulkRequestExecutor(T client, String indexName, String typeName); + + protected abstract SearchRequestExecutor searchRequestExecutor(T client, String indexName); + + @SuppressForbidden(reason = "system out is ok for a command line tool") + public final void run(String[] args) throws Exception { + if (args.length < 6) { + System.err.println( + "usage: benchmarkTargetHostIp indexFilePath indexName typeName numberOfDocuments bulkSize [search request body]"); + System.exit(1); + } + String benchmarkTargetHost = args[0]; + String indexFilePath = args[1]; + String indexName = args[2]; + String typeName = args[3]; + 
int totalDocs = Integer.valueOf(args[4]); + int bulkSize = Integer.valueOf(args[5]); + + int totalIterationCount = (int) Math.floor(totalDocs / bulkSize); + // consider 40% of all iterations as warmup iterations + int warmupIterations = (int) (0.4d * totalIterationCount); + int iterations = totalIterationCount - warmupIterations; + String searchBody = (args.length == 7) ? args[6] : null; + + T client = client(benchmarkTargetHost); + + BenchmarkRunner benchmark = new BenchmarkRunner(warmupIterations, iterations, + new BulkBenchmarkTask( + bulkRequestExecutor(client, indexName, typeName), indexFilePath, warmupIterations + iterations, bulkSize)); + + try { + benchmark.run(); + if (searchBody != null) { + for (int run = 1; run <= 5; run++) { + System.out.println("============="); + System.out.println(" Trial run " + run); + System.out.println("============="); + + for (int throughput = 100; throughput <= 100_000; throughput *= 10) { + //request a GC between trials to reduce the likelihood of a GC occurring in the middle of a trial. + System.gc(); + + BenchmarkRunner searchBenchmark = new BenchmarkRunner(SEARCH_BENCHMARK_ITERATIONS, SEARCH_BENCHMARK_ITERATIONS, + new SearchBenchmarkTask( + searchRequestExecutor(client, indexName), searchBody, 2 * SEARCH_BENCHMARK_ITERATIONS, throughput)); + System.out.printf("Target throughput = %d ops / s%n", throughput); + searchBenchmark.run(); + } + } + } + } finally { + client.close(); + } + } +} diff --git a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/BenchmarkRunner.java b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/BenchmarkRunner.java new file mode 100644 index 00000000000..655b5815f35 --- /dev/null +++ b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/BenchmarkRunner.java @@ -0,0 +1,84 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.benchmark; + +import org.elasticsearch.client.benchmark.metrics.Metrics; +import org.elasticsearch.client.benchmark.metrics.MetricsCalculator; +import org.elasticsearch.client.benchmark.metrics.Sample; +import org.elasticsearch.client.benchmark.metrics.SampleRecorder; +import org.elasticsearch.common.SuppressForbidden; + +import java.util.Arrays; +import java.util.List; +import java.util.Locale; + +public final class BenchmarkRunner { + private final int warmupIterations; + private final int iterations; + private final BenchmarkTask task; + + public BenchmarkRunner(int warmupIterations, int iterations, BenchmarkTask task) { + this.warmupIterations = warmupIterations; + this.iterations = iterations; + this.task = task; + } + + @SuppressForbidden(reason = "system out is ok for a command line tool") + public void run() throws Exception { + SampleRecorder recorder = new SampleRecorder(warmupIterations, iterations); + System.out.printf("Running %s with %d warmup iterations and %d iterations.%n", + task.getClass().getSimpleName(), warmupIterations, iterations); + + try { + task.setUp(recorder); + task.run(); + task.tearDown(); + } catch (InterruptedException ex) { + Thread.currentThread().interrupt(); + return; + } + + List samples = 
recorder.getSamples(); + final List summaryMetrics = MetricsCalculator.calculate(samples); + + if (summaryMetrics.isEmpty()) { + System.out.println("No results."); + } + + for (Metrics metrics : summaryMetrics) { + System.out.printf(Locale.ROOT, "Operation: %s%n", metrics.operation); + String stats = String.format(Locale.ROOT, + "Throughput = %f ops/s, p90 = %f ms, p95 = %f ms, p99 = %f ms, p99.9 = %f ms, p99.99 = %f ms", + metrics.throughput, + metrics.serviceTimeP90, metrics.serviceTimeP95, + metrics.serviceTimeP99, metrics.serviceTimeP999, + metrics.serviceTimeP9999); + System.out.println(repeat(stats.length(), '-')); + System.out.println(stats); + System.out.printf("success count = %d, error count = %d%n", metrics.successCount, metrics.errorCount); + System.out.println(repeat(stats.length(), '-')); + } + } + + private String repeat(int times, char character) { + char[] characters = new char[times]; + Arrays.fill(characters, character); + return new String(characters); + } +} diff --git a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/BenchmarkTask.java b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/BenchmarkTask.java new file mode 100644 index 00000000000..5aabd0e1940 --- /dev/null +++ b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/BenchmarkTask.java @@ -0,0 +1,29 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.benchmark; + +import org.elasticsearch.client.benchmark.metrics.SampleRecorder; + +public interface BenchmarkTask { + void setUp(SampleRecorder sampleRecorder) throws Exception; + + void run() throws Exception; + + void tearDown() throws Exception; +} diff --git a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/metrics/Metrics.java b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/metrics/Metrics.java new file mode 100644 index 00000000000..9108afe4446 --- /dev/null +++ b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/metrics/Metrics.java @@ -0,0 +1,45 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
/**
 * Immutable summary metrics for one benchmarked operation: success/error counts, throughput
 * (ops/s) and service-time percentiles (in milliseconds).
 */
public final class Metrics {
    // name of the operation these metrics describe (e.g. "bulk" or "search")
    public final String operation;
    public final long successCount;
    public final long errorCount;
    public final double throughput;
    // service-time percentiles, in milliseconds
    public final double serviceTimeP90;
    public final double serviceTimeP95;
    public final double serviceTimeP99;
    public final double serviceTimeP999;
    public final double serviceTimeP9999;

    public Metrics(String operation, long successCount, long errorCount, double throughput,
                   double serviceTimeP90, double serviceTimeP95, double serviceTimeP99,
                   double serviceTimeP999, double serviceTimeP9999) {
        this.operation = operation;
        this.successCount = successCount;
        this.errorCount = errorCount;
        this.throughput = throughput;
        this.serviceTimeP90 = serviceTimeP90;
        this.serviceTimeP95 = serviceTimeP95;
        this.serviceTimeP99 = serviceTimeP99;
        this.serviceTimeP999 = serviceTimeP999;
        this.serviceTimeP9999 = serviceTimeP9999;
    }
}
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.benchmark.metrics; + +import org.apache.commons.math3.stat.StatUtils; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +public final class MetricsCalculator { + public static List calculate(Collection samples) { + Map> samplesPerOperation = groupByOperation(samples); + return calculateMetricsPerOperation(samplesPerOperation); + } + + private static Map> groupByOperation(Collection samples) { + Map> samplesPerOperation = new HashMap<>(); + + for (Sample sample : samples) { + if (!samplesPerOperation.containsKey(sample.getOperation())) { + samplesPerOperation.put(sample.getOperation(), new ArrayList<>()); + } + samplesPerOperation.get(sample.getOperation()).add(sample); + } + return samplesPerOperation; + } + + private static List calculateMetricsPerOperation(Map> samplesPerOperation) { + List metrics = new ArrayList<>(); + for (Map.Entry> operationAndMetrics : samplesPerOperation.entrySet()) { + List samples = operationAndMetrics.getValue(); + double[] serviceTimes = new double[samples.size()]; + int it = 0; + long firstStart = Long.MAX_VALUE; + long latestEnd = Long.MIN_VALUE; + for (Sample sample : samples) { + firstStart = Math.min(sample.getStartTimestamp(), firstStart); + latestEnd = Math.max(sample.getStopTimestamp(), latestEnd); + serviceTimes[it++] = sample.getServiceTime(); + } + + metrics.add(new Metrics(operationAndMetrics.getKey(), + samples.stream().filter((r) -> 
r.isSuccess()).count(), + samples.stream().filter((r) -> !r.isSuccess()).count(), + // throughput calculation is based on the total (Wall clock) time it took to generate all samples + calculateThroughput(samples.size(), latestEnd - firstStart), + // convert ns -> ms without losing precision + StatUtils.percentile(serviceTimes, 90.0d) / TimeUnit.MILLISECONDS.toNanos(1L), + StatUtils.percentile(serviceTimes, 95.0d) / TimeUnit.MILLISECONDS.toNanos(1L), + StatUtils.percentile(serviceTimes, 99.0d) / TimeUnit.MILLISECONDS.toNanos(1L), + StatUtils.percentile(serviceTimes, 99.9d) / TimeUnit.MILLISECONDS.toNanos(1L), + StatUtils.percentile(serviceTimes, 99.99d) / TimeUnit.MILLISECONDS.toNanos(1L))); + } + return metrics; + } + + private static double calculateThroughput(int sampleSize, double duration) { + return sampleSize * (TimeUnit.SECONDS.toNanos(1L) / duration); + } +} diff --git a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/metrics/Sample.java b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/metrics/Sample.java new file mode 100644 index 00000000000..59cd6bfd101 --- /dev/null +++ b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/metrics/Sample.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
/**
 * One recorded operation invocation: the operation name, its start and stop timestamps
 * (nanosecond clock values captured by the caller) and whether it succeeded.
 */
public final class Sample {
    private final String operation;
    private final long start;
    private final long stop;
    private final boolean success;

    public Sample(String operation, long startTimestamp, long stopTimestamp, boolean success) {
        this.operation = operation;
        this.start = startTimestamp;
        this.stop = stopTimestamp;
        this.success = success;
    }

    public String getOperation() {
        return operation;
    }

    public long getStartTimestamp() {
        return start;
    }

    public long getStopTimestamp() {
        return stop;
    }

    public boolean isSuccess() {
        return success;
    }

    public long getServiceTime() {
        // this is *not* latency, we're not including wait time in the queue (on purpose)
        return stop - start;
    }
}
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.benchmark.metrics; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +/** + * Stores measurement samples. + * + * This class is NOT threadsafe. + */ +public final class SampleRecorder { + private final int warmupIterations; + private final List samples; + private int currentIteration; + + public SampleRecorder(int warmupIterations, int iterations) { + this.warmupIterations = warmupIterations; + this.samples = new ArrayList<>(iterations); + } + + public void addSample(Sample sample) { + currentIteration++; + // only add samples after warmup + if (currentIteration > warmupIterations) { + samples.add(sample); + } + } + + public List getSamples() { + return Collections.unmodifiableList(samples); + } +} diff --git a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java new file mode 100644 index 00000000000..5844103fd1e --- /dev/null +++ b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java @@ -0,0 +1,174 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.benchmark.ops.bulk; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.client.benchmark.BenchmarkTask; +import org.elasticsearch.client.benchmark.metrics.Sample; +import org.elasticsearch.client.benchmark.metrics.SampleRecorder; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.ESLoggerFactory; + +import java.io.BufferedReader; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; + +public class BulkBenchmarkTask implements BenchmarkTask { + private final BulkRequestExecutor requestExecutor; + private final String indexFilePath; + private final int totalIterations; + private final int bulkSize; + private LoadGenerator generator; + private ExecutorService executorService; + + public BulkBenchmarkTask(BulkRequestExecutor requestExecutor, String indexFilePath, int totalIterations, int bulkSize) { + this.requestExecutor = requestExecutor; + this.indexFilePath = indexFilePath; + this.totalIterations = totalIterations; + this.bulkSize = bulkSize; + } + + @Override + @SuppressForbidden(reason = 
"PathUtils#get is fine - we don't have environment here") + public void setUp(SampleRecorder sampleRecorder) { + BlockingQueue> bulkQueue = new ArrayBlockingQueue<>(256); + + BulkIndexer runner = new BulkIndexer(bulkQueue, totalIterations, sampleRecorder, requestExecutor); + + executorService = Executors.newSingleThreadExecutor((r) -> new Thread(r, "bulk-index-runner")); + executorService.submit(runner); + + generator = new LoadGenerator(PathUtils.get(indexFilePath), bulkQueue, bulkSize); + } + + @Override + @SuppressForbidden(reason = "system out is ok for a command line tool") + public void run() throws Exception { + generator.execute(); + // when the generator is done, there are no more data -> shutdown client + executorService.shutdown(); + //We need to wait until the queue is drained + final boolean finishedNormally = executorService.awaitTermination(20, TimeUnit.MINUTES); + if (finishedNormally == false) { + System.err.println("Background tasks are still running after timeout on enclosing pool. 
Forcing pool shutdown."); + executorService.shutdownNow(); + } + } + + @Override + public void tearDown() { + //no op + } + + private static final class LoadGenerator { + private final Path bulkDataFile; + private final BlockingQueue> bulkQueue; + private final int bulkSize; + + public LoadGenerator(Path bulkDataFile, BlockingQueue> bulkQueue, int bulkSize) { + this.bulkDataFile = bulkDataFile; + this.bulkQueue = bulkQueue; + this.bulkSize = bulkSize; + } + + @SuppressForbidden(reason = "Classic I/O is fine in non-production code") + public void execute() { + try (BufferedReader reader = Files.newBufferedReader(bulkDataFile, StandardCharsets.UTF_8)) { + String line; + int bulkIndex = 0; + List bulkData = new ArrayList<>(bulkSize); + while ((line = reader.readLine()) != null) { + if (bulkIndex == bulkSize) { + sendBulk(bulkData); + // reset data structures + bulkData = new ArrayList<>(bulkSize); + bulkIndex = 0; + } + bulkData.add(line); + bulkIndex++; + } + // also send the last bulk: + if (bulkIndex > 0) { + sendBulk(bulkData); + } + } catch (IOException e) { + throw new ElasticsearchException(e); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + } + + private void sendBulk(List bulkData) throws InterruptedException { + bulkQueue.put(bulkData); + } + } + + + private static final class BulkIndexer implements Runnable { + private static final ESLogger logger = ESLoggerFactory.getLogger(BulkIndexer.class.getName()); + + private final BlockingQueue> bulkData; + private final int totalIterations; + private final BulkRequestExecutor bulkRequestExecutor; + private final SampleRecorder sampleRecorder; + + public BulkIndexer(BlockingQueue> bulkData, int totalIterations, SampleRecorder sampleRecorder, + BulkRequestExecutor bulkRequestExecutor) { + this.bulkData = bulkData; + this.totalIterations = totalIterations; + this.bulkRequestExecutor = bulkRequestExecutor; + this.sampleRecorder = sampleRecorder; + } + + @Override + public void run() { + 
for (int iteration = 0; iteration < totalIterations; iteration++) { + boolean success = false; + List currentBulk; + try { + currentBulk = bulkData.take(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + return; + } + // Yes, this approach is prone to coordinated omission *but* we have to consider that we want to benchmark a closed system + // with backpressure here instead of an open system. So this is actually correct in this case. + long start = System.nanoTime(); + try { + success = bulkRequestExecutor.bulkIndex(currentBulk); + } catch (Exception ex) { + logger.warn("Error while executing bulk request", ex); + } + long stop = System.nanoTime(); + sampleRecorder.addSample(new Sample("bulk", start, stop, success)); + } + } + } +} diff --git a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkRequestExecutor.java b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkRequestExecutor.java new file mode 100644 index 00000000000..be95153132f --- /dev/null +++ b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkRequestExecutor.java @@ -0,0 +1,25 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
/**
 * Executes one bulk-index request for a batch of raw request lines.
 */
public interface BulkRequestExecutor {
    /**
     * @param bulkData the raw lines (as read from the input file) that make up one bulk request.
     * @return {@code true} if the bulk request succeeded, {@code false} otherwise.
     */
    boolean bulkIndex(List<String> bulkData);
}
+ */ +package org.elasticsearch.client.benchmark.ops.search; + +import org.elasticsearch.client.benchmark.BenchmarkTask; +import org.elasticsearch.client.benchmark.metrics.Sample; +import org.elasticsearch.client.benchmark.metrics.SampleRecorder; + +import java.util.concurrent.TimeUnit; + +public class SearchBenchmarkTask implements BenchmarkTask { + private static final long MICROS_PER_SEC = TimeUnit.SECONDS.toMicros(1L); + private static final long NANOS_PER_MICRO = TimeUnit.MICROSECONDS.toNanos(1L); + + private final SearchRequestExecutor searchRequestExecutor; + private final String searchRequestBody; + private final int iterations; + private final int targetThroughput; + + private SampleRecorder sampleRecorder; + + public SearchBenchmarkTask(SearchRequestExecutor searchRequestExecutor, String body, int iterations, int targetThroughput) { + this.searchRequestExecutor = searchRequestExecutor; + this.searchRequestBody = body; + this.iterations = iterations; + this.targetThroughput = targetThroughput; + } + + @Override + public void setUp(SampleRecorder sampleRecorder) throws Exception { + this.sampleRecorder = sampleRecorder; + } + + @Override + public void run() throws Exception { + for (int iteration = 0; iteration < this.iterations; iteration++) { + final long start = System.nanoTime(); + boolean success = searchRequestExecutor.search(searchRequestBody); + final long stop = System.nanoTime(); + sampleRecorder.addSample(new Sample("search", start, stop, success)); + + int waitTime = (int) Math.floor(MICROS_PER_SEC / targetThroughput - (stop - start) / NANOS_PER_MICRO); + if (waitTime > 0) { + // Thread.sleep() time is not very accurate (it's most of the time around 1 - 2 ms off) + // so we rather busy spin for the last few microseconds. 
Still not entirely accurate but way closer + waitMicros(waitTime); + } + } + } + + private void waitMicros(int waitTime) throws InterruptedException { + int millis = waitTime / 1000; + int micros = waitTime % 1000; + if (millis > 0) { + Thread.sleep(millis); + } + // busy spin for the rest of the time + if (micros > 0) { + long end = System.nanoTime() + 1000L * micros; + while (end > System.nanoTime()) { + // busy spin + } + } + } + + @Override + public void tearDown() throws Exception { + // no op + } +} diff --git a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/search/SearchRequestExecutor.java b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/search/SearchRequestExecutor.java new file mode 100644 index 00000000000..abdfecf73c6 --- /dev/null +++ b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/search/SearchRequestExecutor.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.benchmark.ops.search; + +public interface SearchRequestExecutor { + boolean search(String source); +} diff --git a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/rest/RestClientBenchmark.java b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/rest/RestClientBenchmark.java new file mode 100644 index 00000000000..bf661fa661f --- /dev/null +++ b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/rest/RestClientBenchmark.java @@ -0,0 +1,108 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.benchmark.rest; + +import org.apache.http.HttpEntity; +import org.apache.http.HttpHost; +import org.apache.http.HttpStatus; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.nio.entity.NStringEntity; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.benchmark.AbstractBenchmark; +import org.elasticsearch.client.benchmark.ops.bulk.BulkRequestExecutor; +import org.elasticsearch.client.benchmark.ops.search.SearchRequestExecutor; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Collections; +import java.util.List; +import java.util.Locale; + +public final class RestClientBenchmark extends AbstractBenchmark { + public static void main(String[] args) throws Exception { + RestClientBenchmark b = new RestClientBenchmark(); + b.run(args); + } + + @Override + protected RestClient client(String benchmarkTargetHost) { + return RestClient.builder(new HttpHost(benchmarkTargetHost, 9200)).build(); + } + + @Override + protected BulkRequestExecutor bulkRequestExecutor(RestClient client, String indexName, String typeName) { + return new RestBulkRequestExecutor(client, indexName, typeName); + } + + @Override + protected SearchRequestExecutor searchRequestExecutor(RestClient client, String indexName) { + return new RestSearchRequestExecutor(client, indexName); + } + + private static final class RestBulkRequestExecutor implements BulkRequestExecutor { + private final RestClient client; + private final String actionMetaData; + + public RestBulkRequestExecutor(RestClient client, String index, String type) { + this.client = client; + this.actionMetaData = String.format(Locale.ROOT, "{ \"index\" : { \"_index\" : \"%s\", \"_type\" : \"%s\" } }%n", index, type); + } + + @Override + public boolean bulkIndex(List bulkData) { + 
StringBuilder bulkRequestBody = new StringBuilder(); + for (String bulkItem : bulkData) { + bulkRequestBody.append(actionMetaData); + bulkRequestBody.append(bulkItem); + bulkRequestBody.append("\n"); + } + HttpEntity entity = new NStringEntity(bulkRequestBody.toString(), ContentType.APPLICATION_JSON); + try { + Response response = client.performRequest("POST", "/geonames/type/_bulk", Collections.emptyMap(), entity); + return response.getStatusLine().getStatusCode() == HttpStatus.SC_OK; + } catch (Exception e) { + throw new ElasticsearchException(e); + } + } + } + + private static final class RestSearchRequestExecutor implements SearchRequestExecutor { + private final RestClient client; + private final String endpoint; + + private RestSearchRequestExecutor(RestClient client, String indexName) { + this.client = client; + this.endpoint = "/" + indexName + "/_search"; + } + + @Override + public boolean search(String source) { + HttpEntity searchBody = new NStringEntity(source, StandardCharsets.UTF_8); + try { + Response response = client.performRequest("GET", endpoint, Collections.emptyMap(), searchBody); + return response.getStatusLine().getStatusCode() == HttpStatus.SC_OK; + } catch (IOException e) { + throw new ElasticsearchException(e); + } + } + } +} diff --git a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/transport/TransportClientBenchmark.java b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/transport/TransportClientBenchmark.java new file mode 100644 index 00000000000..c52414cf3a4 --- /dev/null +++ b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/transport/TransportClientBenchmark.java @@ -0,0 +1,117 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.benchmark.transport; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.benchmark.AbstractBenchmark; +import org.elasticsearch.client.benchmark.ops.bulk.BulkRequestExecutor; +import org.elasticsearch.client.benchmark.ops.search.SearchRequestExecutor; +import org.elasticsearch.client.transport.TransportClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.transport.client.PreBuiltTransportClient; + +import java.net.InetAddress; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.concurrent.ExecutionException; + +public final class TransportClientBenchmark extends AbstractBenchmark { + public static void main(String[] args) throws Exception { + TransportClientBenchmark benchmark = new TransportClientBenchmark(); + benchmark.run(args); + } + + @Override + protected TransportClient client(String benchmarkTargetHost) throws Exception { + TransportClient client = new 
PreBuiltTransportClient(Settings.EMPTY); + client.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName(benchmarkTargetHost), 9300)); + return client; + } + + @Override + protected BulkRequestExecutor bulkRequestExecutor(TransportClient client, String indexName, String typeName) { + return new TransportBulkRequestExecutor(client, indexName, typeName); + } + + @Override + protected SearchRequestExecutor searchRequestExecutor(TransportClient client, String indexName) { + return new TransportSearchRequestExecutor(client, indexName); + } + + private static final class TransportBulkRequestExecutor implements BulkRequestExecutor { + private final TransportClient client; + private final String indexName; + private final String typeName; + + public TransportBulkRequestExecutor(TransportClient client, String indexName, String typeName) { + this.client = client; + this.indexName = indexName; + this.typeName = typeName; + } + + @Override + public boolean bulkIndex(List bulkData) { + BulkRequestBuilder builder = client.prepareBulk(); + for (String bulkItem : bulkData) { + builder.add(new IndexRequest(indexName, typeName).source(bulkItem.getBytes(StandardCharsets.UTF_8))); + } + BulkResponse bulkResponse; + try { + bulkResponse = builder.execute().get(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + return false; + } catch (ExecutionException e) { + throw new ElasticsearchException(e); + } + return !bulkResponse.hasFailures(); + } + } + + private static final class TransportSearchRequestExecutor implements SearchRequestExecutor { + private final TransportClient client; + private final String indexName; + + private TransportSearchRequestExecutor(TransportClient client, String indexName) { + this.client = client; + this.indexName = indexName; + } + + @Override + public boolean search(String source) { + final SearchResponse response; + try { + response = 
client.prepareSearch(indexName).setQuery(QueryBuilders.wrapperQuery(source)).execute().get(); + return response.status() == RestStatus.OK; + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + return false; + } catch (ExecutionException e) { + throw new ElasticsearchException(e); + } + } + } +} diff --git a/client/benchmark/src/main/resources/log4j.properties b/client/benchmark/src/main/resources/log4j.properties new file mode 100644 index 00000000000..22f54ef68e5 --- /dev/null +++ b/client/benchmark/src/main/resources/log4j.properties @@ -0,0 +1,9 @@ +es.logger.level=INFO +log4j.rootLogger=${es.logger.level}, out + +log4j.logger.org.apache.http=INFO, out +log4j.additivity.org.apache.http=false + +log4j.appender.out=org.apache.log4j.ConsoleAppender +log4j.appender.out.layout=org.apache.log4j.PatternLayout +log4j.appender.out.layout.conversionPattern=[%d{ISO8601}][%-5p][%-25c] %m%n diff --git a/settings.gradle b/settings.gradle index d04e4b233a6..8aeb694b51d 100644 --- a/settings.gradle +++ b/settings.gradle @@ -9,6 +9,7 @@ List projects = [ 'client:sniffer', 'client:transport', 'client:test', + 'client:benchmark', 'benchmarks', 'distribution:integ-test-zip', 'distribution:zip', From 7ed64af639439b462cd03fc39b7da9b434a96064 Mon Sep 17 00:00:00 2001 From: Colin Goodheart-Smithe Date: Tue, 26 Jul 2016 11:33:54 +0100 Subject: [PATCH 69/93] [DOCS] fix callout in buckets path docs --- docs/reference/aggregations/pipeline.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/aggregations/pipeline.asciidoc b/docs/reference/aggregations/pipeline.asciidoc index 4382596159c..4ca249d31db 100644 --- a/docs/reference/aggregations/pipeline.asciidoc +++ b/docs/reference/aggregations/pipeline.asciidoc @@ -150,7 +150,7 @@ used here to filter out buckets which contain no buckets for an inner terms aggr "min_bucket_selector": { "bucket_selector": { "buckets_path": { - "count": "categories._bucket_count" + "count": 
"categories._bucket_count" <1> }, "script": { "inline": "count != 0" From 4bac61425c2ceb11751f42a1eb3e7e7ec2a5f89e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 26 Jul 2016 12:48:48 +0200 Subject: [PATCH 70/93] Adding unit tests for QueryParseContext --- .../index/query/QueryParseContext.java | 2 +- .../index/query/QueryParseContextTests.java | 128 ++++++++++++++++++ 2 files changed, 129 insertions(+), 1 deletion(-) create mode 100644 core/src/test/java/org/elasticsearch/index/query/QueryParseContextTests.java diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java index 33382a7ab73..daf0f6838b5 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java @@ -110,7 +110,7 @@ public class QueryParseContext implements ParseFieldMatcherSupplier { // move to the next START_OBJECT token = parser.nextToken(); if (token != XContentParser.Token.START_OBJECT && token != XContentParser.Token.START_ARRAY) { - throw new ParsingException(parser.getTokenLocation(), "[_na] query malformed, no field after start_object"); + throw new ParsingException(parser.getTokenLocation(), "[_na] query malformed, no start_object after query name"); } @SuppressWarnings("unchecked") Optional result = (Optional) indicesQueriesRegistry.lookup(queryName, parseFieldMatcher, diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryParseContextTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryParseContextTests.java new file mode 100644 index 00000000000..fb88adfc95f --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/QueryParseContextTests.java @@ -0,0 +1,128 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.Optional; + +import static java.util.Collections.emptyList; + +public class QueryParseContextTests extends ESTestCase { + + private static IndicesQueriesRegistry indicesQueriesRegistry; + + @BeforeClass + public static void init() { + indicesQueriesRegistry = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false, emptyList()) + .getQueryParserRegistry(); + } + + public void testParseTopLevelBuilder() throws IOException { + QueryBuilder query = new MatchQueryBuilder("foo", "bar"); + String requestBody = "{ \"query\" : " + query.toString() + "}"; + try (XContentParser parser = 
XContentFactory.xContent(requestBody).createParser(requestBody)) { + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + QueryBuilder actual = context.parseTopLevelQueryBuilder(); + assertEquals(query, actual); + } + } + + public void testParseTopLevelBuilderEmptyObject() throws IOException { + String requestBody = "{}"; + try (XContentParser parser = XContentFactory.xContent(requestBody).createParser(requestBody)) { + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + QueryBuilder query = context.parseTopLevelQueryBuilder(); + assertNull(query); + } + } + + public void testParseTopLevelBuilderUnknownParameter() throws IOException { + String requestBody = "{ \"foo\" : \"bar\"}"; + try (XContentParser parser = XContentFactory.xContent(requestBody).createParser(requestBody)) { + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + ParsingException exception = expectThrows(ParsingException.class, () -> context.parseTopLevelQueryBuilder()); + assertEquals("request does not support [foo]", exception.getMessage()); + } + } + + public void testParseInnerQueryBuilder() throws IOException { + QueryBuilder query = new MatchQueryBuilder("foo", "bar"); + String source = query.toString(); + try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) { + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + Optional actual = context.parseInnerQueryBuilder(); + assertEquals(query, actual.get()); + } + } + + public void testParseInnerQueryBuilderEmptyBody() throws IOException { + String source = "{}"; + try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) { + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.EMPTY); + Optional emptyQuery = 
context.parseInnerQueryBuilder(); + assertFalse(emptyQuery.isPresent()); + } + } + + public void testParseInnerQueryBuilderExceptions() throws IOException { + String source = "{ \"foo\": \"bar\" }"; + try (XContentParser parser = JsonXContent.jsonXContent.createParser(source)) { + parser.nextToken(); + parser.nextToken(); // don't start with START_OBJECT to provoke exception + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + ParsingException exception = expectThrows(ParsingException.class, () -> context.parseInnerQueryBuilder()); + assertEquals("[_na] query malformed, must start with start_object", exception.getMessage()); + } + + source = "{}"; + try (XContentParser parser = JsonXContent.jsonXContent.createParser(source)) { + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> context.parseInnerQueryBuilder()); + assertEquals("query malformed, empty clause found at [1:2]", exception.getMessage()); + } + + source = "{ \"foo\" : \"bar\" }"; + try (XContentParser parser = JsonXContent.jsonXContent.createParser(source)) { + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + ParsingException exception = expectThrows(ParsingException.class, () -> context.parseInnerQueryBuilder()); + assertEquals("[_na] query malformed, no start_object after query name", exception.getMessage()); + } + + source = "{ \"foo\" : {} }"; + try (XContentParser parser = JsonXContent.jsonXContent.createParser(source)) { + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + ParsingException exception = expectThrows(ParsingException.class, () -> context.parseInnerQueryBuilder()); + assertEquals("no [query] registered for [foo]", exception.getMessage()); + } + } + +} From 
b208a7dbaeeb269e6d1e121f46a29cb6b0f8004f Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Tue, 26 Jul 2016 16:04:51 +0200 Subject: [PATCH 71/93] Add socket timeout in MockTcpTransport With this commit we set an explicit socket timeout in MockTcpTransport to avoid hanging tests in case of disconnections. --- .../elasticsearch/transport/AbstractSimpleTransportTestCase.java | 1 - .../main/java/org/elasticsearch/transport/MockTcpTransport.java | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 58cc45a75a1..e7f659751b5 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -488,7 +488,6 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { assertThat(latch.await(5, TimeUnit.SECONDS), equalTo(true)); } - @TestLogging("transport:DEBUG") public void testConcurrentSendRespondAndDisconnect() throws BrokenBarrierException, InterruptedException { Set sendingErrors = ConcurrentCollections.newConcurrentSet(); Set responseErrors = ConcurrentCollections.newConcurrentSet(); diff --git a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java index 44a2bbd2166..8a0828794da 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java @@ -210,6 +210,7 @@ public class MockTcpTransport extends TcpTransport private void configureSocket(Socket socket) throws SocketException { socket.setTcpNoDelay(TCP_NO_DELAY.get(settings)); + socket.setSoTimeout(15000); ByteSizeValue 
tcpSendBufferSize = TCP_SEND_BUFFER_SIZE.get(settings); if (tcpSendBufferSize.bytes() > 0) { socket.setSendBufferSize(tcpSendBufferSize.bytesAsInt()); From ccf275af7b422f78dd86a1b57a2bd9d37eb5828c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 26 Jul 2016 16:10:47 +0200 Subject: [PATCH 72/93] Adapt to introduction of ESClientYamlSuiteTestCase --- .../org/elasticsearch/action/quality/RankEvalRestIT.java | 6 +++--- .../org/elasticsearch/index/rankeval/QuerySpecTests.java | 4 +++- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/action/quality/RankEvalRestIT.java b/modules/rank-eval/src/test/java/org/elasticsearch/action/quality/RankEvalRestIT.java index b68385017f6..5a7c92b97bc 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/action/quality/RankEvalRestIT.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/action/quality/RankEvalRestIT.java @@ -22,19 +22,19 @@ package org.elasticsearch.action.quality; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class RankEvalRestIT extends ESRestTestCase { +public class RankEvalRestIT extends ESClientYamlSuiteTestCase { public RankEvalRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); + return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/QuerySpecTests.java 
b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/QuerySpecTests.java index 49ae4d0d6e4..19f30c8b458 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/QuerySpecTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/QuerySpecTests.java @@ -37,6 +37,8 @@ import org.junit.BeforeClass; import java.io.IOException; import java.util.List; +import static java.util.Collections.emptyList; + public class QuerySpecTests extends ESTestCase { private static IndicesQueriesRegistry queriesRegistry; @@ -50,7 +52,7 @@ public class QuerySpecTests extends ESTestCase { @BeforeClass public static void init() throws IOException { aggsParsers = new AggregatorParsers(new ParseFieldRegistry<>("aggregation"), new ParseFieldRegistry<>("aggregation_pipes")); - searchModule = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false); + searchModule = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false, emptyList()); queriesRegistry = searchModule.getQueryParserRegistry(); suggesters = searchModule.getSuggesters(); } From ac53c90ff44dd0dde8b497e5875567bad4abd4cc Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Thu, 30 Jun 2016 16:40:37 -0600 Subject: [PATCH 73/93] Add 'elasticsearch-translog' CLI tool with 'translog' command MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This adds the `bin/elasticsearch-translate` bin file that will be used for CLI tasks pertaining to Elasticsearch. Currently it implements only a single sub-command, `truncate-translog`, that creates a truncated translog for a given folder. Here's what running the tool looks like: ``` λ bin/elasticsearch-translog truncate -d data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/ Checking existing translog files !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ! WARNING: Elasticsearch MUST be stopped before running this tool ! ! ! ! 
WARNING: Documents inside of translog files will be lost ! ! ! ! WARNING: The following files will be DELETED! ! !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! --> data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-10.tlog --> data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-18.tlog --> data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-21.tlog --> data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-12.ckp --> data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-25.ckp --> data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-29.tlog --> data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-2.tlog --> data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-5.tlog --> data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-41.ckp --> data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-6.ckp --> data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-37.ckp --> data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-24.ckp --> data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-11.ckp Continue and DELETE files? [y/N] y Reading translog UUID information from Lucene commit from shard at [data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/index] Translog Generation: 3 Translog UUID : AxqC4rocTC6e0fwsljAh-Q Removing existing translog files Creating new empty checkpoint at [data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog.ckp] Creating new empty translog at [data/nodes/0/indices/P45vf_YQRhqjfwLMUvSqDw/0/translog/translog-3.tlog] Done. ``` It also includes a `-b` batch operation that can be used to skip the confirmation diaglog. 
Resolves #19123 --- .../index/translog/Checkpoint.java | 2 +- .../index/translog/TranslogReader.java | 4 +- .../index/translog/TranslogToolCli.java | 56 +++++ .../index/translog/TranslogWriter.java | 12 +- .../translog/TruncateTranslogCommand.java | 224 +++++++++++++++++ .../index/translog/TruncateTranslogIT.java | 236 ++++++++++++++++++ .../main/resources/bin/elasticsearch-translog | 90 +++++++ .../resources/bin/elasticsearch-translog.bat | 61 +++++ 8 files changed, 679 insertions(+), 6 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/index/translog/TranslogToolCli.java create mode 100644 core/src/main/java/org/elasticsearch/index/translog/TruncateTranslogCommand.java create mode 100644 core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java create mode 100755 distribution/src/main/resources/bin/elasticsearch-translog create mode 100644 distribution/src/main/resources/bin/elasticsearch-translog.bat diff --git a/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java b/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java index f630ba3faba..0fd59090944 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java @@ -61,7 +61,7 @@ class Checkpoint { Channels.writeToChannel(buffer, channel); } - private void write(DataOutput out) throws IOException { + void write(DataOutput out) throws IOException { out.writeLong(offset); out.writeInt(numOps); out.writeLong(generation); diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java index fcb3daea796..581e8d6a903 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java @@ -113,7 +113,9 @@ public class TranslogReader extends BaseTranslogReader implements Closeable 
{ headerStream.read(ref.bytes, ref.offset, ref.length); BytesRef uuidBytes = new BytesRef(translogUUID); if (uuidBytes.bytesEquals(ref) == false) { - throw new TranslogCorruptedException("expected shard UUID [" + uuidBytes + "] but got: [" + ref + "] this translog file belongs to a different translog. path:" + path); + throw new TranslogCorruptedException("expected shard UUID " + uuidBytes + "/" + uuidBytes.utf8ToString() + + " but got: " + ref + "/" + ref.utf8ToString() + + " this translog file belongs to a different translog. path:" + path); } return new TranslogReader(checkpoint.generation, channel, path, ref.length + CodecUtil.headerLength(TranslogWriter.TRANSLOG_CODEC) + Integer.BYTES, checkpoint.offset, checkpoint.numOps); default: diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogToolCli.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogToolCli.java new file mode 100644 index 00000000000..eaf50f25a01 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogToolCli.java @@ -0,0 +1,56 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.translog; + +import org.elasticsearch.cli.MultiCommand; +import org.elasticsearch.cli.Terminal; +import org.elasticsearch.common.logging.LogConfigurator; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.translog.TruncateTranslogCommand; +import org.elasticsearch.node.internal.InternalSettingsPreparer; + +/** + * Class encapsulating and dispatching commands from the {@code elasticsearch-translog} command line tool + */ +public class TranslogToolCli extends MultiCommand { + + public TranslogToolCli() { + super("A CLI tool for various Elasticsearch translog actions"); + subcommands.put("truncate", new TruncateTranslogCommand()); + } + + public static void main(String[] args) throws Exception { + // initialize default for es.logger.level because we will not read the logging.yml + String loggerLevel = System.getProperty("es.logger.level", "INFO"); + String pathHome = System.getProperty("es.path.home"); + // Set the appender for all potential log files to terminal so that other components that use the logger print out the + // same terminal. 
+ Environment loggingEnvironment = InternalSettingsPreparer.prepareEnvironment(Settings.builder() + .put("path.home", pathHome) + .put("appender.terminal.type", "terminal") + .put("rootLogger", "${logger.level}, terminal") + .put("logger.level", loggerLevel) + .build(), Terminal.DEFAULT); + LogConfigurator.configure(loggingEnvironment.settings(), false); + + exit(new TranslogToolCli().main(args, Terminal.DEFAULT)); + } +} diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 89d12983b07..bb4a84651c5 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -76,10 +76,16 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { return getHeaderLength(new BytesRef(translogUUID).length); } - private static int getHeaderLength(int uuidLength) { + static int getHeaderLength(int uuidLength) { return CodecUtil.headerLength(TRANSLOG_CODEC) + uuidLength + Integer.BYTES; } + static void writeHeader(OutputStreamDataOutput out, BytesRef ref) throws IOException { + CodecUtil.writeHeader(out, TRANSLOG_CODEC, VERSION); + out.writeInt(ref.length); + out.writeBytes(ref.bytes, ref.offset, ref.length); + } + public static TranslogWriter create(ShardId shardId, String translogUUID, long fileGeneration, Path file, ChannelFactory channelFactory, ByteSizeValue bufferSize) throws IOException { final BytesRef ref = new BytesRef(translogUUID); final int headerLength = getHeaderLength(ref.length); @@ -88,9 +94,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { // This OutputStreamDataOutput is intentionally not closed because // closing it will close the FileChannel final OutputStreamDataOutput out = new OutputStreamDataOutput(java.nio.channels.Channels.newOutputStream(channel)); - CodecUtil.writeHeader(out, 
TRANSLOG_CODEC, VERSION); - out.writeInt(ref.length); - out.writeBytes(ref.bytes, ref.offset, ref.length); + writeHeader(out, ref); channel.force(true); writeCheckpoint(channelFactory, headerLength, 0, file.getParent(), fileGeneration); final TranslogWriter writer = new TranslogWriter(channelFactory, shardId, fileGeneration, channel, file, bufferSize); diff --git a/core/src/main/java/org/elasticsearch/index/translog/TruncateTranslogCommand.java b/core/src/main/java/org/elasticsearch/index/translog/TruncateTranslogCommand.java new file mode 100644 index 00000000000..b6b91f14ba8 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/translog/TruncateTranslogCommand.java @@ -0,0 +1,224 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.translog; + +import joptsimple.OptionParser; +import joptsimple.OptionSet; +import joptsimple.OptionSpec; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexCommit; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FSDirectory; +import org.apache.lucene.store.Lock; +import org.apache.lucene.store.LockObtainFailedException; +import org.apache.lucene.store.NativeFSLockFactory; +import org.apache.lucene.store.OutputStreamDataOutput; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.IOUtils; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.cli.SettingCommand; +import org.elasticsearch.cli.Terminal; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.translog.Checkpoint; + +import java.io.IOException; +import java.nio.channels.Channels; +import java.nio.channels.FileChannel; +import java.nio.file.DirectoryStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; +import java.nio.file.StandardOpenOption; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class TruncateTranslogCommand extends SettingCommand { + + private final OptionSpec translogFolder; + private final OptionSpec batchMode; + + public TruncateTranslogCommand() { + super("Truncates a translog to create a new, empty translog"); + this.translogFolder = parser.acceptsAll(Arrays.asList("d", "dir"), + "Translog Directory location on disk") + .withRequiredArg() + .required(); + this.batchMode = parser.acceptsAll(Arrays.asList("b", "batch"), + "Enable batch mode explicitly, automatic confirmation of warnings"); + } + + 
// Visible for testing + public OptionParser getParser() { + return this.parser; + } + + @Override + protected void printAdditionalHelp(Terminal terminal) { + terminal.println("This tool truncates the translog and translog"); + terminal.println("checkpoint files to create a new translog"); + } + + @SuppressForbidden(reason = "Necessary to use the path passed in") + private Path getTranslogPath(OptionSet options) { + return PathUtils.get(translogFolder.value(options), "", ""); + } + + @Override + protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { + boolean batch = options.has(batchMode); + + Path translogPath = getTranslogPath(options); + Path idxLocation = translogPath.getParent().resolve("index"); + + if (Files.exists(translogPath) == false || Files.isDirectory(translogPath) == false) { + throw new ElasticsearchException("translog directory [" + translogPath + "], must exist and be a directory"); + } + + if (Files.exists(idxLocation) == false || Files.isDirectory(idxLocation) == false) { + throw new ElasticsearchException("unable to find a shard at [" + idxLocation + "], which must exist and be a directory"); + } + + // Hold the lock open for the duration of the tool running + try (Directory dir = FSDirectory.open(idxLocation, NativeFSLockFactory.INSTANCE); + Lock writeLock = dir.obtainLock(IndexWriter.WRITE_LOCK_NAME)) { + Set translogFiles; + try { + terminal.println("Checking existing translog files"); + translogFiles = filesInDirectory(translogPath); + } catch (IOException e) { + terminal.println("encountered IOException while listing directory, aborting..."); + throw new ElasticsearchException("failed to find existing translog files", e); + } + + // Warn about ES being stopped and files being deleted + warnAboutDeletingFiles(terminal, translogFiles, batch); + + List commits; + try { + terminal.println("Reading translog UUID information from Lucene commit from shard at [" + idxLocation + "]"); + commits = 
DirectoryReader.listCommits(dir); + } catch (IndexNotFoundException infe) { + throw new ElasticsearchException("unable to find a valid shard at [" + idxLocation + "]", infe); + } + + // Retrieve the generation and UUID from the existing data + Map commitData = commits.get(commits.size() - 1).getUserData(); + String translogGeneration = commitData.get(Translog.TRANSLOG_GENERATION_KEY); + String translogUUID = commitData.get(Translog.TRANSLOG_UUID_KEY); + if (translogGeneration == null || translogUUID == null) { + throw new ElasticsearchException("shard must have a valid translog generation and UUID but got: [{}] and: [{}]", + translogGeneration, translogUUID); + } + terminal.println("Translog Generation: " + translogGeneration); + terminal.println("Translog UUID : " + translogUUID); + + Path tempEmptyCheckpoint = translogPath.resolve("temp-" + Translog.CHECKPOINT_FILE_NAME); + Path realEmptyCheckpoint = translogPath.resolve(Translog.CHECKPOINT_FILE_NAME); + Path tempEmptyTranslog = translogPath.resolve("temp-" + Translog.TRANSLOG_FILE_PREFIX + + translogGeneration + Translog.TRANSLOG_FILE_SUFFIX); + Path realEmptyTranslog = translogPath.resolve(Translog.TRANSLOG_FILE_PREFIX + + translogGeneration + Translog.TRANSLOG_FILE_SUFFIX); + + // Write empty checkpoint and translog to empty files + long gen = Long.parseLong(translogGeneration); + int translogLen = writeEmptyTranslog(tempEmptyTranslog, translogUUID); + writeEmptyCheckpoint(tempEmptyCheckpoint, translogLen, gen); + + terminal.println("Removing existing translog files"); + IOUtils.rm(translogFiles.toArray(new Path[]{})); + + terminal.println("Creating new empty checkpoint at [" + realEmptyCheckpoint + "]"); + Files.move(tempEmptyCheckpoint, realEmptyCheckpoint, StandardCopyOption.ATOMIC_MOVE); + terminal.println("Creating new empty translog at [" + realEmptyTranslog + "]"); + Files.move(tempEmptyTranslog, realEmptyTranslog, StandardCopyOption.ATOMIC_MOVE); + + // Fsync the translog directory after rename + 
IOUtils.fsync(translogPath, true); + + } catch (LockObtainFailedException lofe) { + throw new ElasticsearchException("Failed to lock shard's directory at [" + idxLocation + "], is Elasticsearch still running?"); + } + + terminal.println("Done."); + } + + /** Write a checkpoint file to the given location with the given generation */ + public static void writeEmptyCheckpoint(Path filename, int translogLength, long translogGeneration) throws IOException { + try (FileChannel fc = FileChannel.open(filename, StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW); + OutputStreamDataOutput out = new OutputStreamDataOutput(Channels.newOutputStream(fc))) { + Checkpoint emptyCheckpoint = new Checkpoint(translogLength, 0, translogGeneration); + emptyCheckpoint.write(out); + fc.force(true); + } + } + + /** + * Write a translog containing the given translog UUID to the given location. Returns the number of bytes written. + */ + public static int writeEmptyTranslog(Path filename, String translogUUID) throws IOException { + final BytesRef translogRef = new BytesRef(translogUUID); + try (FileChannel fc = FileChannel.open(filename, StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW); + OutputStreamDataOutput out = new OutputStreamDataOutput(Channels.newOutputStream(fc))) { + TranslogWriter.writeHeader(out, translogRef); + fc.force(true); + } + return TranslogWriter.getHeaderLength(translogRef.length); + } + + /** Show a warning about deleting files, asking for a confirmation if {@code batchMode} is false */ + public static void warnAboutDeletingFiles(Terminal terminal, Set files, boolean batchMode) { + terminal.println("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); + terminal.println("! WARNING: Elasticsearch MUST be stopped before running this tool !"); + terminal.println("! !"); + terminal.println("! WARNING: Documents inside of translog files will be lost !"); + terminal.println("! 
!"); + terminal.println("! WARNING: The following files will be DELETED! !"); + terminal.println("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); + for (Path file : files) { + terminal.println("--> " + file); + } + terminal.println(""); + if (batchMode == false) { + String text = terminal.readText("Continue and DELETE files? [y/N] "); + if (!text.equalsIgnoreCase("y")) { + throw new ElasticsearchException("aborted by user"); + } + } + } + + /** Return a Set of all files in a given directory */ + public static Set filesInDirectory(Path directory) throws IOException { + Set files = new HashSet<>(); + try (DirectoryStream stream = Files.newDirectoryStream(directory)) { + for (Path file : stream) { + files.add(file); + } + } + return files; + } + +} diff --git a/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java b/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java new file mode 100644 index 00000000000..f6a28169898 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java @@ -0,0 +1,236 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.translog; + +import com.carrotsearch.randomizedtesting.generators.RandomPicks; +import joptsimple.OptionParser; +import joptsimple.OptionSet; +import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cli.MockTerminal; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.routing.GroupShardsIterator; +import org.elasticsearch.cluster.routing.ShardIterator; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.Priority; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.MockEngineFactoryPlugin; +import org.elasticsearch.index.translog.TruncateTranslogCommand; +import org.elasticsearch.monitor.fs.FsInfo; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.engine.MockEngineSupport; +import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; +import org.elasticsearch.test.transport.MockTransportService; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.FileChannel; +import java.nio.file.DirectoryStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Set; +import java.util.TreeSet; +import java.util.concurrent.TimeUnit; + +import static 
org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList; +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, numDataNodes = 0) +public class TruncateTranslogIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return pluginList(MockTransportService.TestPlugin.class, MockEngineFactoryPlugin.class); + } + + public void testCorruptTranslogTruncation() throws Exception { + internalCluster().startNodesAsync(1, Settings.EMPTY).get(); + + assertAcked(prepareCreate("test").setSettings(Settings.builder() + .put("index.number_of_shards", 1) + .put("index.number_of_replicas", 0) + .put("index.refresh_interval", "-1") + .put(MockEngineSupport.DISABLE_FLUSH_ON_CLOSE.getKey(), true) // never flush - always recover from translog + )); + ensureYellow(); + + // Index some documents + int numDocs = scaledRandomIntBetween(100, 1000); + IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < builders.length; i++) { + builders[i] = client().prepareIndex("test", "type").setSource("foo", "bar"); + } + disableTranslogFlush("test"); + indexRandom(false, false, false, Arrays.asList(builders)); + Set translogDirs = getTranslogDirs("test"); + + TruncateTranslogCommand ttc = new TruncateTranslogCommand(); + MockTerminal t = new MockTerminal(); + OptionParser parser = ttc.getParser(); + + for (Path translogDir : translogDirs) { + OptionSet options = parser.parse("-d", translogDir.toAbsolutePath().toString(), "-b"); + // Try running it before the shard is closed, it should flip out because it can't acquire the lock + try { + logger.info("--> running truncate while index is open on [{}]", translogDir.toAbsolutePath()); + ttc.execute(t, options, new 
HashMap()); + fail("expected the truncate command to fail not being able to acquire the lock"); + } catch (Exception e) { + assertThat(e.getMessage(), containsString("Failed to lock shard's directory")); + } + } + + // Corrupt the translog file(s) + logger.info("--> corrupting translog"); + corruptRandomTranslogFiles("test"); + + // Restart the single node + logger.info("--> restarting node"); + internalCluster().fullRestart(); + client().admin().cluster().prepareHealth().setWaitForYellowStatus() + .setTimeout(new TimeValue(1000, TimeUnit.MILLISECONDS)) + .setWaitForEvents(Priority.LANGUID) + .get(); + + try { + client().prepareSearch("test").setQuery(matchAllQuery()).get(); + fail("all shards should be failed due to a corrupted translog"); + } catch (SearchPhaseExecutionException e) { + // Good, all shards should be failed because there is only a + // single shard and its translog is corrupt + } + + // Close the index so we can actually truncate the translog + logger.info("--> closing 'test' index"); + client().admin().indices().prepareClose("test").get(); + + for (Path translogDir : translogDirs) { + OptionSet options = parser.parse("-d", translogDir.toAbsolutePath().toString(), "-b"); + logger.info("--> running truncate translog command for [{}]", translogDir.toAbsolutePath()); + ttc.execute(t, options, new HashMap()); + logger.info("--> output:\n{}", t.getOutput()); + } + + // Re-open index + logger.info("--> opening 'test' index"); + client().admin().indices().prepareOpen("test").get(); + ensureYellow("test"); + + // Run a search and make sure it succeeds + SearchResponse resp = client().prepareSearch("test").setQuery(matchAllQuery()).get(); + ElasticsearchAssertions.assertNoFailures(resp); + } + + private Set getTranslogDirs(String indexName) throws IOException { + ClusterState state = client().admin().cluster().prepareState().get().getState(); + GroupShardsIterator shardIterators = state.getRoutingTable().activePrimaryShardsGrouped(new String[]{indexName}, 
false); + final Index idx = state.metaData().index(indexName).getIndex(); + List iterators = iterableAsArrayList(shardIterators); + ShardIterator shardIterator = RandomPicks.randomFrom(random(), iterators); + ShardRouting shardRouting = shardIterator.nextOrNull(); + assertNotNull(shardRouting); + assertTrue(shardRouting.primary()); + assertTrue(shardRouting.assignedToNode()); + String nodeId = shardRouting.currentNodeId(); + NodesStatsResponse nodeStatses = client().admin().cluster().prepareNodesStats(nodeId).setFs(true).get(); + Set translogDirs = new TreeSet<>(); // treeset makes sure iteration order is deterministic + for (FsInfo.Path fsPath : nodeStatses.getNodes().get(0).getFs()) { + String path = fsPath.getPath(); + final String relativeDataLocationPath = "indices/"+ idx.getUUID() +"/" + Integer.toString(shardRouting.getId()) + "/translog"; + Path translogPath = PathUtils.get(path).resolve(relativeDataLocationPath); + if (Files.isDirectory(translogPath)) { + translogDirs.add(translogPath); + } + } + return translogDirs; + } + + private void corruptRandomTranslogFiles(String indexName) throws IOException { + Set translogDirs = getTranslogDirs(indexName); + Set files = new TreeSet<>(); // treeset makes sure iteration order is deterministic + for (Path translogDir : translogDirs) { + if (Files.isDirectory(translogDir)) { + logger.info("--> path: {}", translogDir); + try (DirectoryStream stream = Files.newDirectoryStream(translogDir)) { + for (Path item : stream) { + logger.info("--> File: {}", item); + if (Files.isRegularFile(item) && item.getFileName().toString().startsWith("translog-")) { + files.add(item); + } + } + } + } + } + Path fileToCorrupt = null; + if (!files.isEmpty()) { + int corruptions = randomIntBetween(5, 20); + for (int i = 0; i < corruptions; i++) { + fileToCorrupt = RandomPicks.randomFrom(random(), files); + try (FileChannel raf = FileChannel.open(fileToCorrupt, StandardOpenOption.READ, StandardOpenOption.WRITE)) { + // read + 
raf.position(randomIntBetween(0, (int) Math.min(Integer.MAX_VALUE, raf.size() - 1))); + long filePointer = raf.position(); + ByteBuffer bb = ByteBuffer.wrap(new byte[1]); + raf.read(bb); + bb.flip(); + + // corrupt + byte oldValue = bb.get(0); + byte newValue = (byte) (oldValue + 1); + bb.put(0, newValue); + + // rewrite + raf.position(filePointer); + raf.write(bb); + logger.info("--> corrupting file {} -- flipping at position {} from {} to {} file: {}", + fileToCorrupt, filePointer, Integer.toHexString(oldValue), + Integer.toHexString(newValue), fileToCorrupt); + } + } + } + assertThat("no file corrupted", fileToCorrupt, notNullValue()); + } + + /** Disables translog flushing for the specified index */ + private static void disableTranslogFlush(String index) { + Settings settings = Settings.builder() + .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) + .build(); + client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get(); + } + +} diff --git a/distribution/src/main/resources/bin/elasticsearch-translog b/distribution/src/main/resources/bin/elasticsearch-translog new file mode 100755 index 00000000000..47a48f02b47 --- /dev/null +++ b/distribution/src/main/resources/bin/elasticsearch-translog @@ -0,0 +1,90 @@ +#!/bin/bash + +CDPATH="" +SCRIPT="$0" + +# SCRIPT may be an arbitrarily deep series of symlinks. Loop until we have the concrete path. +while [ -h "$SCRIPT" ] ; do + ls=`ls -ld "$SCRIPT"` + # Drop everything prior to -> + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + SCRIPT="$link" + else + SCRIPT=`dirname "$SCRIPT"`/"$link" + fi +done + +# determine elasticsearch home +ES_HOME=`dirname "$SCRIPT"`/.. 
+ +# make ELASTICSEARCH_HOME absolute +ES_HOME=`cd "$ES_HOME"; pwd` + + +# Sets the default values for elasticsearch variables used in this script +if [ -z "$CONF_DIR" ]; then + CONF_DIR="${path.conf}" +fi + +# The default env file is defined at building/packaging time. +# For a ${project.name} package, the value is "${path.env}". +ES_ENV_FILE="${path.env}" + +# If an include is specified with the ES_INCLUDE environment variable, use it +if [ -n "$ES_INCLUDE" ]; then + ES_ENV_FILE="$ES_INCLUDE" +fi + +# Source the environment file +if [ -n "$ES_ENV_FILE" ]; then + + # If the ES_ENV_FILE is not found, try to resolve the path + # against the ES_HOME directory + if [ ! -f "$ES_ENV_FILE" ]; then + ES_ENV_FILE="$ELASTIC_HOME/$ES_ENV_FILE" + fi + + . "$ES_ENV_FILE" + if [ $? -ne 0 ]; then + echo "Unable to source environment file: $ES_ENV_FILE" >&2 + exit 1 + fi +fi + +# don't let JAVA_TOOL_OPTIONS slip in (e.g. crazy agents in ubuntu) +# works around https://bugs.launchpad.net/ubuntu/+source/jayatana/+bug/1441487 +if [ "x$JAVA_TOOL_OPTIONS" != "x" ]; then + echo "Warning: Ignoring JAVA_TOOL_OPTIONS=$JAVA_TOOL_OPTIONS" + unset JAVA_TOOL_OPTIONS +fi + +# CONF_FILE setting was removed +if [ ! -z "$CONF_FILE" ]; then + echo "CONF_FILE setting is no longer supported. elasticsearch.yml must be placed in the config directory and cannot be renamed." + exit 1 +fi + +if [ -x "$JAVA_HOME/bin/java" ]; then + JAVA=$JAVA_HOME/bin/java +else + JAVA=`which java` +fi + +if [ ! -x "$JAVA" ]; then + echo "Could not find any executable java binary. Please install java in your PATH or set JAVA_HOME" + exit 1 +fi + +# full hostname passed through cut for portability on systems that do not support hostname -s +# export on separate line for shells that do not support combining definition and export +HOSTNAME=`hostname | cut -d. 
-f1` +export HOSTNAME + +declare -a args=("$@") + +if [ -e "$CONF_DIR" ]; then + args=("${args[@]}" -Edefault.path.conf="$CONF_DIR") +fi + +exec "$JAVA" $ES_JAVA_OPTS -Delasticsearch -Des.path.home="$ES_HOME" -cp "$ES_HOME/lib/*" org.elasticsearch.index.translog.TranslogToolCli "${args[@]}" diff --git a/distribution/src/main/resources/bin/elasticsearch-translog.bat b/distribution/src/main/resources/bin/elasticsearch-translog.bat new file mode 100644 index 00000000000..636a6665c03 --- /dev/null +++ b/distribution/src/main/resources/bin/elasticsearch-translog.bat @@ -0,0 +1,61 @@ +@echo off + +SETLOCAL enabledelayedexpansion + +IF DEFINED JAVA_HOME ( + set JAVA=%JAVA_HOME%\bin\java.exe +) ELSE ( + FOR %%I IN (java.exe) DO set JAVA=%%~$PATH:I +) +IF NOT EXIST "%JAVA%" ( + ECHO Could not find any executable java binary. Please install java in your PATH or set JAVA_HOME 1>&2 + EXIT /B 1 +) + +set SCRIPT_DIR=%~dp0 +for %%I in ("%SCRIPT_DIR%..") do set ES_HOME=%%~dpfI + +TITLE Elasticsearch Plugin Manager ${project.version} + +SET properties= +SET args= + +:loop +SET "current=%~1" +SHIFT +IF "x!current!" == "x" GOTO breakloop + +IF "!current:~0,2%!" == "-D" ( + ECHO "!current!" | FINDSTR /C:"=">nul && ( + :: current matches -D*=* + IF "x!properties!" NEQ "x" ( + SET properties=!properties! "!current!" + ) ELSE ( + SET properties="!current!" + ) + ) || ( + :: current matches -D* + IF "x!properties!" NEQ "x" ( + SET properties=!properties! "!current!=%~1" + ) ELSE ( + SET properties="!current!=%~1" + ) + SHIFT + ) +) ELSE ( + :: current matches * + IF "x!args!" NEQ "x" ( + SET args=!args! "!current!" + ) ELSE ( + SET args="!current!" + ) +) + +GOTO loop +:breakloop + +SET HOSTNAME=%COMPUTERNAME% + +"%JAVA%" %ES_JAVA_OPTS% -Des.path.home="%ES_HOME%" !properties! -cp "%ES_HOME%/lib/*;" "org.elasticsearch.index.translog.TranslogToolCli" !args! 
+ +ENDLOCAL From 8f2882a442fb6deaab5b645dfc80577193dfb81f Mon Sep 17 00:00:00 2001 From: Alexander Lin Date: Tue, 26 Jul 2016 08:16:19 -0700 Subject: [PATCH 74/93] Add _operation field to index, update, delete responses Performing the bulk request shown in #19267 now results in the following: ``` {"_index":"test","_type":"test","_id":"1","_version":1,"_operation":"create","forced_refresh":false,"_shards":{"total":2,"successful":1,"failed":0},"status":201} {"_index":"test","_type":"test","_id":"1","_version":1,"_operation":"noop","forced_refresh":false,"_shards":{"total":2,"successful":1,"failed":0},"status":200} ``` --- .../action/DocWriteResponse.java | 75 ++++++++++++++++--- .../action/bulk/TransportShardBulkAction.java | 4 +- .../action/delete/DeleteResponse.java | 35 ++------- .../action/index/IndexResponse.java | 32 ++------ .../action/update/TransportUpdateAction.java | 7 +- .../action/update/UpdateHelper.java | 6 +- .../action/update/UpdateResponse.java | 39 +++++----- docs/reference/docs/bulk.asciidoc | 2 +- docs/reference/docs/delete.asciidoc | 3 +- docs/reference/docs/index_.asciidoc | 2 + docs/reference/docs/update.asciidoc | 16 +++- .../reindex/AsyncBulkByScrollActionTests.java | 4 +- .../test/delete/12_operation.yaml | 26 +++++++ .../test/index/12_operation.yaml | 21 ++++++ .../test/update/12_operation.yaml | 52 +++++++++++++ 15 files changed, 224 insertions(+), 100 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_operation.yaml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/index/12_operation.yaml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/update/12_operation.yaml diff --git a/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java b/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java index 49ac5d4f8a4..95689e66d3f 100644 --- a/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java +++ 
b/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.StatusToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.IndexSettings; @@ -32,29 +33,83 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.util.Locale; /** * A base class for the response of a write operation that involves a single doc */ public abstract class DocWriteResponse extends ReplicationResponse implements WriteResponse, StatusToXContent { + public enum Operation implements Writeable { + CREATE(0), + INDEX(1), + DELETE(2), + NOOP(3); + + private final byte op; + private final String lowercase; + + Operation(int op) { + this.op = (byte) op; + this.lowercase = this.toString().toLowerCase(Locale.ENGLISH); + } + + public byte getOp() { + return op; + } + + public String getLowercase() { + return lowercase; + } + + public static Operation readFrom(StreamInput in) throws IOException{ + Byte opcode = in.readByte(); + switch(opcode){ + case 0: + return CREATE; + case 1: + return INDEX; + case 2: + return DELETE; + case 3: + return NOOP; + default: + throw new IllegalArgumentException("Unknown operation code: " + opcode); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeByte(op); + } + } + private ShardId shardId; private String id; private String type; private long version; private boolean forcedRefresh; + protected Operation operation; - public DocWriteResponse(ShardId shardId, String type, String id, long version) { + public DocWriteResponse(ShardId shardId, String type, String id, long 
version, Operation operation) { this.shardId = shardId; this.type = type; this.id = id; this.version = version; + this.operation = operation; } // needed for deserialization protected DocWriteResponse() { } + /** + * The change that occurred to the document. + */ + public Operation getOperation() { + return operation; + } + /** * The index the document was changed in. */ @@ -143,6 +198,7 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr id = in.readString(); version = in.readZLong(); forcedRefresh = in.readBoolean(); + operation = Operation.readFrom(in); } @Override @@ -153,22 +209,17 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr out.writeString(id); out.writeZLong(version); out.writeBoolean(forcedRefresh); - } - - static final class Fields { - static final String _INDEX = "_index"; - static final String _TYPE = "_type"; - static final String _ID = "_id"; - static final String _VERSION = "_version"; + operation.writeTo(out); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { ReplicationResponse.ShardInfo shardInfo = getShardInfo(); - builder.field(Fields._INDEX, shardId.getIndexName()) - .field(Fields._TYPE, type) - .field(Fields._ID, id) - .field(Fields._VERSION, version) + builder.field("_index", shardId.getIndexName()) + .field("_type", type) + .field("_id", id) + .field("_version", version) + .field("_operation", getOperation().getLowercase()) .field("forced_refresh", forcedRefresh); shardInfo.toXContent(builder, params); return builder; diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 45fc97dca98..cb3bb6bdea8 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -248,7 +248,7 @@ 
public class TransportShardBulkAction extends TransportWriteAction 0) { Tuple> sourceAndContent = XContentHelper.convertToMap(indexSourceAsBytes, true); updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, request.index(), indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes)); @@ -261,7 +261,7 @@ public class TransportShardBulkAction extends TransportWriteAction writeResult = updateResult.writeResult; DeleteResponse response = writeResult.getResponse(); DeleteRequest deleteRequest = updateResult.request(); - updateResponse = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), false); + updateResponse = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getOperation()); updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, request.index(), response.getVersion(), updateResult.result.updatedSourceAsMap(), updateResult.result.updateSourceContentType(), null)); // Replace the update request to the translated delete request to execute on the replica. 
item = request.items()[requestIndex] = new BulkItemRequest(request.items()[requestIndex].id(), deleteRequest); diff --git a/core/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java b/core/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java index f40c419b7ff..aed4ced897f 100644 --- a/core/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java +++ b/core/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java @@ -20,8 +20,6 @@ package org.elasticsearch.action.delete; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; @@ -36,52 +34,29 @@ import java.io.IOException; */ public class DeleteResponse extends DocWriteResponse { - private boolean found; - public DeleteResponse() { } public DeleteResponse(ShardId shardId, String type, String id, long version, boolean found) { - super(shardId, type, id, version); - this.found = found; + super(shardId, type, id, version, found ? Operation.DELETE : Operation.NOOP); } - /** * Returns true if a doc was found to delete. */ public boolean isFound() { - return found; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - found = in.readBoolean(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeBoolean(found); + return operation == Operation.DELETE; } @Override public RestStatus status() { - if (found == false) { - return RestStatus.NOT_FOUND; - } - return super.status(); - } - - static final class Fields { - static final String FOUND = "found"; + return isFound() ? 
super.status() : RestStatus.NOT_FOUND; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(Fields.FOUND, isFound()); + builder.field("found", isFound()); super.toXContent(builder, params); return builder; } @@ -94,7 +69,7 @@ public class DeleteResponse extends DocWriteResponse { builder.append(",type=").append(getType()); builder.append(",id=").append(getId()); builder.append(",version=").append(getVersion()); - builder.append(",found=").append(found); + builder.append(",operation=").append(getOperation().getLowercase()); builder.append(",shards=").append(getShardInfo()); return builder.append("]").toString(); } diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexResponse.java b/core/src/main/java/org/elasticsearch/action/index/IndexResponse.java index b37e86ccb2c..851b6bc0e08 100644 --- a/core/src/main/java/org/elasticsearch/action/index/IndexResponse.java +++ b/core/src/main/java/org/elasticsearch/action/index/IndexResponse.java @@ -36,42 +36,24 @@ import java.io.IOException; */ public class IndexResponse extends DocWriteResponse { - private boolean created; - public IndexResponse() { } public IndexResponse(ShardId shardId, String type, String id, long version, boolean created) { - super(shardId, type, id, version); - this.created = created; + super(shardId, type, id, version, created ? Operation.CREATE : Operation.INDEX); } /** * Returns true if the document was created, false if updated. 
*/ public boolean isCreated() { - return this.created; + return this.operation == Operation.CREATE; } @Override public RestStatus status() { - if (created) { - return RestStatus.CREATED; - } - return super.status(); - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - created = in.readBoolean(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeBoolean(created); + return isCreated() ? RestStatus.CREATED : super.status(); } @Override @@ -82,19 +64,15 @@ public class IndexResponse extends DocWriteResponse { builder.append(",type=").append(getType()); builder.append(",id=").append(getId()); builder.append(",version=").append(getVersion()); - builder.append(",created=").append(created); + builder.append(",operation=").append(getOperation().getLowercase()); builder.append(",shards=").append(getShardInfo()); return builder.append("]").toString(); } - static final class Fields { - static final String CREATED = "created"; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { super.toXContent(builder, params); - builder.field(Fields.CREATED, isCreated()); + builder.field("created", isCreated()); return builder; } } diff --git a/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java index 3bff01cb8a6..86d42e4db12 100644 --- a/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java +++ b/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java @@ -22,6 +22,7 @@ package org.elasticsearch.action.update; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.RoutingMissingException; import 
org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; @@ -185,7 +186,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio indexAction.execute(upsertRequest, new ActionListener() { @Override public void onResponse(IndexResponse response) { - UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.isCreated()); + UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getOperation()); if (request.fields() != null && request.fields().length > 0) { Tuple> sourceAndContent = XContentHelper.convertToMap(upsertSourceBytes, true); update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes)); @@ -223,7 +224,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio indexAction.execute(indexRequest, new ActionListener() { @Override public void onResponse(IndexResponse response) { - UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.isCreated()); + UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getOperation()); update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), indexSourceBytes)); update.setForcedRefresh(response.forcedRefresh()); listener.onResponse(update); @@ -252,7 +253,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio 
deleteAction.execute(deleteRequest, new ActionListener() { @Override public void onResponse(DeleteResponse response) { - UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), false); + UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getOperation()); update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), null)); update.setForcedRefresh(response.forcedRefresh()); listener.onResponse(update); diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index aa92510e38b..cb876f706e0 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -116,7 +116,7 @@ public class UpdateHelper extends AbstractComponent { request.script.getScript()); } UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), - getResult.getVersion(), false); + getResult.getVersion(), UpdateResponse.convert(Operation.NONE)); update.setGetResult(getResult); return new Result(update, Operation.NONE, upsertDoc, XContentType.JSON); } @@ -234,12 +234,12 @@ public class UpdateHelper extends AbstractComponent { .setRefreshPolicy(request.getRefreshPolicy()); return new Result(deleteRequest, Operation.DELETE, updatedSourceAsMap, updateSourceContentType); } else if ("none".equals(operation)) { - UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), false); + UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), 
UpdateResponse.convert(Operation.NONE)); update.setGetResult(extractGetResult(request, request.index(), getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef())); return new Result(update, Operation.NONE, updatedSourceAsMap, updateSourceContentType); } else { logger.warn("Used update operation [{}] for script [{}], doing nothing...", operation, request.script.getScript()); - UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), false); + UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), UpdateResponse.convert(Operation.NONE)); return new Result(update, Operation.NONE, updatedSourceAsMap, updateSourceContentType); } } diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateResponse.java b/core/src/main/java/org/elasticsearch/action/update/UpdateResponse.java index 8d2eeeb383b..16633fd4126 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateResponse.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateResponse.java @@ -29,11 +29,8 @@ import org.elasticsearch.rest.RestStatus; import java.io.IOException; -/** - */ public class UpdateResponse extends DocWriteResponse { - private boolean created; private GetResult getResult; public UpdateResponse() { @@ -43,14 +40,28 @@ public class UpdateResponse extends DocWriteResponse { * Constructor to be used when a update didn't translate in a write. 
* For example: update script with operation set to none */ - public UpdateResponse(ShardId shardId, String type, String id, long version, boolean created) { - this(new ShardInfo(0, 0), shardId, type, id, version, created); + public UpdateResponse(ShardId shardId, String type, String id, long version, Operation operation) { + this(new ShardInfo(0, 0), shardId, type, id, version, operation); } - public UpdateResponse(ShardInfo shardInfo, ShardId shardId, String type, String id, long version, boolean created) { - super(shardId, type, id, version); + public UpdateResponse(ShardInfo shardInfo, ShardId shardId, String type, String id, + long version, Operation operation) { + super(shardId, type, id, version, operation); setShardInfo(shardInfo); - this.created = created; + } + + public static Operation convert(UpdateHelper.Operation op) { + switch(op) { + case UPSERT: + return Operation.CREATE; + case INDEX: + return Operation.INDEX; + case DELETE: + return Operation.DELETE; + case NONE: + return Operation.NOOP; + } + throw new IllegalArgumentException(); } public void setGetResult(GetResult getResult) { @@ -65,22 +76,17 @@ public class UpdateResponse extends DocWriteResponse { * Returns true if document was created due to an UPSERT operation */ public boolean isCreated() { - return this.created; - + return this.operation == Operation.CREATE; } @Override public RestStatus status() { - if (created) { - return RestStatus.CREATED; - } - return super.status(); + return isCreated() ? 
RestStatus.CREATED : super.status(); } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - created = in.readBoolean(); if (in.readBoolean()) { getResult = GetResult.readGetResult(in); } @@ -89,7 +95,6 @@ public class UpdateResponse extends DocWriteResponse { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeBoolean(created); if (getResult == null) { out.writeBoolean(false); } else { @@ -122,7 +127,7 @@ public class UpdateResponse extends DocWriteResponse { builder.append(",type=").append(getType()); builder.append(",id=").append(getId()); builder.append(",version=").append(getVersion()); - builder.append(",created=").append(created); + builder.append(",operation=").append(getOperation().getLowercase()); builder.append(",shards=").append(getShardInfo()); return builder.append("]").toString(); } diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc index c9189b57c01..76096967de1 100644 --- a/docs/reference/docs/bulk.asciidoc +++ b/docs/reference/docs/bulk.asciidoc @@ -57,7 +57,7 @@ $ cat requests { "index" : { "_index" : "test", "_type" : "type1", "_id" : "1" } } { "field1" : "value1" } $ curl -s -XPOST localhost:9200/_bulk --data-binary "@requests"; echo -{"took":7,"items":[{"create":{"_index":"test","_type":"type1","_id":"1","_version":1}}]} +{"took":7, "errors": false, "items":[{"index":{"_index":"test","_type":"type1","_id":"1","_version":1,"_operation":"create","forced_refresh":false}}]} -------------------------------------------------- Because this format uses literal `\n`'s as delimiters, please be sure diff --git a/docs/reference/docs/delete.asciidoc b/docs/reference/docs/delete.asciidoc index 18a370fc416..2494605f87e 100644 --- a/docs/reference/docs/delete.asciidoc +++ b/docs/reference/docs/delete.asciidoc @@ -25,7 +25,8 @@ The result of the above delete operation is: "_index" : "twitter", "_type" : "tweet", "_id" : "1", - "_version" : 2 + 
"_version" : 2, + "_operation" : "delete" } -------------------------------------------------- diff --git a/docs/reference/docs/index_.asciidoc b/docs/reference/docs/index_.asciidoc index dda75dd5aa5..2be399e4e40 100644 --- a/docs/reference/docs/index_.asciidoc +++ b/docs/reference/docs/index_.asciidoc @@ -31,6 +31,7 @@ The result of the above index operation is: "_id" : "1", "_version" : 1, "created" : true, + "_operation" : "create", "forced_refresh": false } -------------------------------------------------- @@ -231,6 +232,7 @@ The result of the above index operation is: "_id" : "6a8ca01c-7896-48e9-81cc-9f70661fcb32", "_version" : 1, "created" : true, + "_operation": "create", "forced_refresh": false } -------------------------------------------------- diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index 2edb0a71a3f..f85e152f1b3 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -132,8 +132,20 @@ curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ }' -------------------------------------------------- -If `name` was `new_name` before the request was sent then document is still -reindexed. +If `name` was `new_name` before the request was sent then the entire update +request is ignored. The `_operation` element in the response returns `noop` if +the request was ignored. 
+ +[source,js] +-------------------------------------------------- +{ + "_index": "test", + "_type": "type1", + "_id": "1", + "_version": 1, + "_operation": "noop" +} +-------------------------------------------------- [[upserts]] [float] diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index 77e792b8333..bb44c4e6b85 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -314,7 +314,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { }; ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 0, emptyList(), null); simulateScrollResponse(new DummyAbstractAsyncBulkByScrollAction(), timeValueNanos(System.nanoTime()), 10, response); - ExecutionException e = expectThrows(ExecutionException.class, () -> listener.get()); + ExecutionException e = expectThrows(ExecutionException.class, () -> listener.get()); assertThat(e.getMessage(), equalTo("EsRejectedExecutionException[test]")); assertThat(client.scrollsCleared, contains(scrollId)); @@ -773,7 +773,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { UpdateRequest update = (UpdateRequest) item; opType = "update"; response = new UpdateResponse(shardId, update.type(), update.id(), - randomIntBetween(0, Integer.MAX_VALUE), true); + randomIntBetween(0, Integer.MAX_VALUE), DocWriteResponse.Operation.CREATE); } else if (item instanceof DeleteRequest) { DeleteRequest delete = (DeleteRequest) item; opType = "delete"; diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_operation.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_operation.yaml new file mode 100644 index 00000000000..7dbc84a5078 --- /dev/null +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_operation.yaml @@ -0,0 +1,26 @@ +--- +"Delete operation field": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + + - do: + delete: + index: test_1 + type: test + id: 1 + + - match: { _operation: delete } + + - do: + catch: missing + delete: + index: test_1 + type: test + id: 1 + + - match: { _operation: noop } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_operation.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_operation.yaml new file mode 100644 index 00000000000..a935bda420d --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_operation.yaml @@ -0,0 +1,21 @@ +--- +"Index operation field": + + - do: + index: + index: test_index + type: test + id: 1 + body: { foo: bar } + + - match: { _operation: create } + + - do: + index: + index: test_index + type: test + id: 1 + body: { foo: bar } + op_type: index + + - match: { _operation: index } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_operation.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_operation.yaml new file mode 100644 index 00000000000..abbb8d4a59a --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_operation.yaml @@ -0,0 +1,52 @@ +--- +"Update operation field": + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: bar } + doc_as_upsert: true + + - match: { _version: 1 } + - match: { _operation: create } + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: bar } + doc_as_upsert: true + + - match: { _version: 1 } + - match: { _operation: noop } + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: bar } + doc_as_upsert: true + detect_noop: false + + - match: { _version: 2 } + - match: { _operation: index } + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: baz } 
+ doc_as_upsert: true + detect_noop: true + + - match: { _version: 3 } + - match: { _operation: index } From 8a51cfb5b350882b670ecfd826d9da296e953604 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 26 Jul 2016 17:28:37 +0200 Subject: [PATCH 75/93] Simplify Sniffer initialization and automatically create the default HostsSniffer (#19599) Simplify Sniffer initialization and automatically create the default HostsSniffer Take Sniffer.Builder out to its own top level class. Remove HostsSniffer.Builder and let SnifferBuilder create the default HostsSniffer. This simplifies the Sniffer initialization as the HostsSniffer is not mandatory anymore. It can still be specified though in case the configuration needs to be changed or a different impl has to be used. Also make HostsSniffer an interface. --- .../sniff/ElasticsearchHostsSniffer.java | 170 ++++++++++++++++++ .../client/sniff/HostsSniffer.java | 166 +---------------- .../elasticsearch/client/sniff/Sniffer.java | 75 ++------ .../client/sniff/SnifferBuilder.java | 91 ++++++++++ ...va => ElasticsearchHostsSnifferTests.java} | 39 +++- .../sniff/HostsSnifferBuilderTests.java | 73 -------- .../client/sniff/MockHostsSniffer.java | 15 +- .../sniff/SniffOnFailureListenerTests.java | 2 +- .../client/sniff/SnifferBuilderTests.java | 34 ++-- 9 files changed, 331 insertions(+), 334 deletions(-) create mode 100644 client/sniffer/src/main/java/org/elasticsearch/client/sniff/ElasticsearchHostsSniffer.java create mode 100644 client/sniffer/src/main/java/org/elasticsearch/client/sniff/SnifferBuilder.java rename client/sniffer/src/test/java/org/elasticsearch/client/sniff/{HostsSnifferTests.java => ElasticsearchHostsSnifferTests.java} (87%) delete mode 100644 client/sniffer/src/test/java/org/elasticsearch/client/sniff/HostsSnifferBuilderTests.java diff --git a/client/sniffer/src/main/java/org/elasticsearch/client/sniff/ElasticsearchHostsSniffer.java 
b/client/sniffer/src/main/java/org/elasticsearch/client/sniff/ElasticsearchHostsSniffer.java new file mode 100644 index 00000000000..34a49883586 --- /dev/null +++ b/client/sniffer/src/main/java/org/elasticsearch/client/sniff/ElasticsearchHostsSniffer.java @@ -0,0 +1,170 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.sniff; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonToken; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.http.HttpEntity; +import org.apache.http.HttpHost; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; + +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.TimeUnit; + +/** + * Class responsible for sniffing the http hosts from elasticsearch through the nodes info api and returning them back. + * Compatible with elasticsearch 5.x and 2.x. 
+ */ +public final class ElasticsearchHostsSniffer implements HostsSniffer { + + private static final Log logger = LogFactory.getLog(ElasticsearchHostsSniffer.class); + + public static final long DEFAULT_SNIFF_REQUEST_TIMEOUT = TimeUnit.SECONDS.toMillis(1); + + private final RestClient restClient; + private final Map sniffRequestParams; + private final Scheme scheme; + private final JsonFactory jsonFactory = new JsonFactory(); + + /** + * Creates a new instance of the Elasticsearch sniffer. It will use the provided {@link RestClient} to fetch the hosts, + * through the nodes info api, the default sniff request timeout value {@link #DEFAULT_SNIFF_REQUEST_TIMEOUT} and http + * as the scheme for all the hosts. + * @param restClient client used to fetch the hosts from elasticsearch through nodes info api. Usually the same instance + * that is also provided to {@link Sniffer#builder(RestClient)}, so that the hosts are set to the same + * client that was used to fetch them. + */ + public ElasticsearchHostsSniffer(RestClient restClient) { + this(restClient, DEFAULT_SNIFF_REQUEST_TIMEOUT, ElasticsearchHostsSniffer.Scheme.HTTP); + } + + /** + * Creates a new instance of the Elasticsearch sniffer. It will use the provided {@link RestClient} to fetch the hosts + * through the nodes info api, the provided sniff request timeout value and scheme. + * @param restClient client used to fetch the hosts from elasticsearch through nodes info api. Usually the same instance + * that is also provided to {@link Sniffer#builder(RestClient)}, so that the hosts are set to the same + * client that was used to sniff them. + * @param sniffRequestTimeoutMillis the sniff request timeout (in milliseconds) to be passed in as a query string parameter + * to elasticsearch. Allows to halt the request without any failure, as only the nodes + * that have responded within this timeout will be returned. 
+ * @param scheme the scheme to associate sniffed nodes with (as it is not returned by elasticsearch) + */ + public ElasticsearchHostsSniffer(RestClient restClient, long sniffRequestTimeoutMillis, Scheme scheme) { + this.restClient = Objects.requireNonNull(restClient, "restClient cannot be null"); + if (sniffRequestTimeoutMillis < 0) { + throw new IllegalArgumentException("sniffRequestTimeoutMillis must be greater than 0"); + } + this.sniffRequestParams = Collections.singletonMap("timeout", sniffRequestTimeoutMillis + "ms"); + this.scheme = Objects.requireNonNull(scheme, "scheme cannot be null"); + } + + /** + * Calls the elasticsearch nodes info api, parses the response and returns all the found http hosts + */ + public List sniffHosts() throws IOException { + Response response = restClient.performRequest("get", "/_nodes/http", sniffRequestParams); + return readHosts(response.getEntity()); + } + + private List readHosts(HttpEntity entity) throws IOException { + try (InputStream inputStream = entity.getContent()) { + JsonParser parser = jsonFactory.createParser(inputStream); + if (parser.nextToken() != JsonToken.START_OBJECT) { + throw new IOException("expected data to start with an object"); + } + List hosts = new ArrayList<>(); + while (parser.nextToken() != JsonToken.END_OBJECT) { + if (parser.getCurrentToken() == JsonToken.START_OBJECT) { + if ("nodes".equals(parser.getCurrentName())) { + while (parser.nextToken() != JsonToken.END_OBJECT) { + JsonToken token = parser.nextToken(); + assert token == JsonToken.START_OBJECT; + String nodeId = parser.getCurrentName(); + HttpHost sniffedHost = readHost(nodeId, parser, this.scheme); + if (sniffedHost != null) { + logger.trace("adding node [" + nodeId + "]"); + hosts.add(sniffedHost); + } + } + } else { + parser.skipChildren(); + } + } + } + return hosts; + } + } + + private static HttpHost readHost(String nodeId, JsonParser parser, Scheme scheme) throws IOException { + HttpHost httpHost = null; + String fieldName = 
null; + while (parser.nextToken() != JsonToken.END_OBJECT) { + if (parser.getCurrentToken() == JsonToken.FIELD_NAME) { + fieldName = parser.getCurrentName(); + } else if (parser.getCurrentToken() == JsonToken.START_OBJECT) { + if ("http".equals(fieldName)) { + while (parser.nextToken() != JsonToken.END_OBJECT) { + if (parser.getCurrentToken() == JsonToken.VALUE_STRING && "publish_address".equals(parser.getCurrentName())) { + URI boundAddressAsURI = URI.create(scheme + "://" + parser.getValueAsString()); + httpHost = new HttpHost(boundAddressAsURI.getHost(), boundAddressAsURI.getPort(), + boundAddressAsURI.getScheme()); + } else if (parser.getCurrentToken() == JsonToken.START_OBJECT) { + parser.skipChildren(); + } + } + } else { + parser.skipChildren(); + } + } + } + //http section is not present if http is not enabled on the node, ignore such nodes + if (httpHost == null) { + logger.debug("skipping node [" + nodeId + "] with http disabled"); + return null; + } + return httpHost; + } + + public enum Scheme { + HTTP("http"), HTTPS("https"); + + private final String name; + + Scheme(String name) { + this.name = name; + } + + @Override + public String toString() { + return name; + } + } +} diff --git a/client/sniffer/src/main/java/org/elasticsearch/client/sniff/HostsSniffer.java b/client/sniffer/src/main/java/org/elasticsearch/client/sniff/HostsSniffer.java index d5b2c67ee02..9eb7b344259 100644 --- a/client/sniffer/src/main/java/org/elasticsearch/client/sniff/HostsSniffer.java +++ b/client/sniffer/src/main/java/org/elasticsearch/client/sniff/HostsSniffer.java @@ -19,175 +19,17 @@ package org.elasticsearch.client.sniff; -import com.fasterxml.jackson.core.JsonFactory; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonToken; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.http.HttpEntity; import org.apache.http.HttpHost; -import org.elasticsearch.client.Response; -import 
org.elasticsearch.client.RestClient; import java.io.IOException; -import java.io.InputStream; -import java.net.URI; -import java.util.ArrayList; -import java.util.Collections; import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.TimeUnit; /** - * Class responsible for sniffing the http hosts from elasticsearch through the nodes info api and returning them back. - * Compatible with elasticsearch 5.x and 2.x. + * Responsible for sniffing the http hosts */ -public class HostsSniffer { - - private static final Log logger = LogFactory.getLog(HostsSniffer.class); - - private final RestClient restClient; - private final Map sniffRequestParams; - private final Scheme scheme; - private final JsonFactory jsonFactory = new JsonFactory(); - - protected HostsSniffer(RestClient restClient, long sniffRequestTimeoutMillis, Scheme scheme) { - this.restClient = restClient; - this.sniffRequestParams = Collections.singletonMap("timeout", sniffRequestTimeoutMillis + "ms"); - this.scheme = scheme; - } - +public interface HostsSniffer { /** - * Calls the elasticsearch nodes info api, parses the response and returns all the found http hosts + * Returns the sniffed http hosts */ - public List sniffHosts() throws IOException { - Response response = restClient.performRequest("get", "/_nodes/http", sniffRequestParams); - return readHosts(response.getEntity()); - } - - private List readHosts(HttpEntity entity) throws IOException { - try (InputStream inputStream = entity.getContent()) { - JsonParser parser = jsonFactory.createParser(inputStream); - if (parser.nextToken() != JsonToken.START_OBJECT) { - throw new IOException("expected data to start with an object"); - } - List hosts = new ArrayList<>(); - while (parser.nextToken() != JsonToken.END_OBJECT) { - if (parser.getCurrentToken() == JsonToken.START_OBJECT) { - if ("nodes".equals(parser.getCurrentName())) { - while (parser.nextToken() != JsonToken.END_OBJECT) { - JsonToken token = 
parser.nextToken(); - assert token == JsonToken.START_OBJECT; - String nodeId = parser.getCurrentName(); - HttpHost sniffedHost = readHost(nodeId, parser, this.scheme); - if (sniffedHost != null) { - logger.trace("adding node [" + nodeId + "]"); - hosts.add(sniffedHost); - } - } - } else { - parser.skipChildren(); - } - } - } - return hosts; - } - } - - private static HttpHost readHost(String nodeId, JsonParser parser, Scheme scheme) throws IOException { - HttpHost httpHost = null; - String fieldName = null; - while (parser.nextToken() != JsonToken.END_OBJECT) { - if (parser.getCurrentToken() == JsonToken.FIELD_NAME) { - fieldName = parser.getCurrentName(); - } else if (parser.getCurrentToken() == JsonToken.START_OBJECT) { - if ("http".equals(fieldName)) { - while (parser.nextToken() != JsonToken.END_OBJECT) { - if (parser.getCurrentToken() == JsonToken.VALUE_STRING && "publish_address".equals(parser.getCurrentName())) { - URI boundAddressAsURI = URI.create(scheme + "://" + parser.getValueAsString()); - httpHost = new HttpHost(boundAddressAsURI.getHost(), boundAddressAsURI.getPort(), - boundAddressAsURI.getScheme()); - } else if (parser.getCurrentToken() == JsonToken.START_OBJECT) { - parser.skipChildren(); - } - } - } else { - parser.skipChildren(); - } - } - } - //http section is not present if http is not enabled on the node, ignore such nodes - if (httpHost == null) { - logger.debug("skipping node [" + nodeId + "] with http disabled"); - return null; - } - return httpHost; - } - - /** - * Returns a new {@link Builder} to help with {@link HostsSniffer} creation. - */ - public static Builder builder(RestClient restClient) { - return new Builder(restClient); - } - - public enum Scheme { - HTTP("http"), HTTPS("https"); - - private final String name; - - Scheme(String name) { - this.name = name; - } - - @Override - public String toString() { - return name; - } - } - - /** - * HostsSniffer builder. Helps creating a new {@link HostsSniffer}. 
- */ - public static class Builder { - public static final long DEFAULT_SNIFF_REQUEST_TIMEOUT = TimeUnit.SECONDS.toMillis(1); - - private final RestClient restClient; - private long sniffRequestTimeoutMillis = DEFAULT_SNIFF_REQUEST_TIMEOUT; - private Scheme scheme = Scheme.HTTP; - - private Builder(RestClient restClient) { - Objects.requireNonNull(restClient, "restClient cannot be null"); - this.restClient = restClient; - } - - /** - * Sets the sniff request timeout (in milliseconds) to be passed in as a query string parameter to elasticsearch. - * Allows to halt the request without any failure, as only the nodes that have responded within this timeout will be returned. - */ - public Builder setSniffRequestTimeoutMillis(int sniffRequestTimeoutMillis) { - if (sniffRequestTimeoutMillis <= 0) { - throw new IllegalArgumentException("sniffRequestTimeoutMillis must be greater than 0"); - } - this.sniffRequestTimeoutMillis = sniffRequestTimeoutMillis; - return this; - } - - /** - * Sets the scheme to associate sniffed nodes with (as it is not returned by elasticsearch) - */ - public Builder setScheme(Scheme scheme) { - Objects.requireNonNull(scheme, "scheme cannot be null"); - this.scheme = scheme; - return this; - } - - /** - * Creates a new {@link HostsSniffer} instance given the provided configuration - */ - public HostsSniffer build() { - return new HostsSniffer(restClient, sniffRequestTimeoutMillis, scheme); - } - } + List sniffHosts() throws IOException; } diff --git a/client/sniffer/src/main/java/org/elasticsearch/client/sniff/Sniffer.java b/client/sniffer/src/main/java/org/elasticsearch/client/sniff/Sniffer.java index 61fa32e7b62..247206bbbc9 100644 --- a/client/sniffer/src/main/java/org/elasticsearch/client/sniff/Sniffer.java +++ b/client/sniffer/src/main/java/org/elasticsearch/client/sniff/Sniffer.java @@ -28,7 +28,6 @@ import org.elasticsearch.client.RestClientBuilder; import java.io.Closeable; import java.io.IOException; import java.util.List; -import 
java.util.Objects; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; @@ -36,12 +35,12 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; /** - * Class responsible for sniffing nodes from an elasticsearch cluster and setting them to a provided instance of {@link RestClient}. - * Must be created via {@link Builder}, which allows to set all of the different options or rely on defaults. + * Class responsible for sniffing nodes from some source (default is elasticsearch itself) and setting them to a provided instance of + * {@link RestClient}. Must be created via {@link SnifferBuilder}, which allows to set all of the different options or rely on defaults. * A background task fetches the nodes through the {@link HostsSniffer} and sets them to the {@link RestClient} instance. * It is possible to perform sniffing on failure by creating a {@link SniffOnFailureListener} and providing it as an argument to - * {@link RestClientBuilder#setFailureListener(RestClient.FailureListener)}. The Sniffer implementation - * needs to be lazily set to the previously created SniffOnFailureListener through {@link SniffOnFailureListener#setSniffer(Sniffer)}. + * {@link RestClientBuilder#setFailureListener(RestClient.FailureListener)}. The Sniffer implementation needs to be lazily set to the + * previously created SniffOnFailureListener through {@link SniffOnFailureListener#setSniffer(Sniffer)}. 
*/ public final class Sniffer implements Closeable { @@ -49,7 +48,7 @@ public final class Sniffer implements Closeable { private final Task task; - private Sniffer(RestClient restClient, HostsSniffer hostsSniffer, long sniffInterval, long sniffAfterFailureDelay) { + Sniffer(RestClient restClient, HostsSniffer hostsSniffer, long sniffInterval, long sniffAfterFailureDelay) { this.task = new Task(hostsSniffer, restClient, sniffInterval, sniffAfterFailureDelay); } @@ -144,64 +143,12 @@ public final class Sniffer implements Closeable { } /** - * Returns a new {@link Builder} to help with {@link Sniffer} creation. + * Returns a new {@link SnifferBuilder} to help with {@link Sniffer} creation. + * + * @param restClient the client that gets its hosts set (via {@link RestClient#setHosts(HttpHost...)}) once they are fetched + * @return a new instance of {@link SnifferBuilder} */ - public static Builder builder(RestClient restClient, HostsSniffer hostsSniffer) { - return new Builder(restClient, hostsSniffer); - } - - /** - * Sniffer builder. Helps creating a new {@link Sniffer}. 
- */ - public static final class Builder { - public static final long DEFAULT_SNIFF_INTERVAL = TimeUnit.MINUTES.toMillis(5); - public static final long DEFAULT_SNIFF_AFTER_FAILURE_DELAY = TimeUnit.MINUTES.toMillis(1); - - private final RestClient restClient; - private final HostsSniffer hostsSniffer; - private long sniffIntervalMillis = DEFAULT_SNIFF_INTERVAL; - private long sniffAfterFailureDelayMillis = DEFAULT_SNIFF_AFTER_FAILURE_DELAY; - - /** - * Creates a new builder instance by providing the {@link RestClient} that will be used to communicate with elasticsearch, - * and the - */ - private Builder(RestClient restClient, HostsSniffer hostsSniffer) { - Objects.requireNonNull(restClient, "restClient cannot be null"); - this.restClient = restClient; - Objects.requireNonNull(hostsSniffer, "hostsSniffer cannot be null"); - this.hostsSniffer = hostsSniffer; - } - - /** - * Sets the interval between consecutive ordinary sniff executions in milliseconds. Will be honoured when - * sniffOnFailure is disabled or when there are no failures between consecutive sniff executions. - * @throws IllegalArgumentException if sniffIntervalMillis is not greater than 0 - */ - public Builder setSniffIntervalMillis(int sniffIntervalMillis) { - if (sniffIntervalMillis <= 0) { - throw new IllegalArgumentException("sniffIntervalMillis must be greater than 0"); - } - this.sniffIntervalMillis = sniffIntervalMillis; - return this; - } - - /** - * Sets the delay of a sniff execution scheduled after a failure (in milliseconds) - */ - public Builder setSniffAfterFailureDelayMillis(int sniffAfterFailureDelayMillis) { - if (sniffAfterFailureDelayMillis <= 0) { - throw new IllegalArgumentException("sniffAfterFailureDelayMillis must be greater than 0"); - } - this.sniffAfterFailureDelayMillis = sniffAfterFailureDelayMillis; - return this; - } - - /** - * Creates the {@link Sniffer} based on the provided configuration. 
- */ - public Sniffer build() { - return new Sniffer(restClient, hostsSniffer, sniffIntervalMillis, sniffAfterFailureDelayMillis); - } + public static SnifferBuilder builder(RestClient restClient) { + return new SnifferBuilder(restClient); } } diff --git a/client/sniffer/src/main/java/org/elasticsearch/client/sniff/SnifferBuilder.java b/client/sniffer/src/main/java/org/elasticsearch/client/sniff/SnifferBuilder.java new file mode 100644 index 00000000000..010a8a4a78d --- /dev/null +++ b/client/sniffer/src/main/java/org/elasticsearch/client/sniff/SnifferBuilder.java @@ -0,0 +1,91 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.sniff; + +import org.elasticsearch.client.RestClient; + +import java.util.Objects; +import java.util.concurrent.TimeUnit; + +/** + * Sniffer builder. Helps creating a new {@link Sniffer}. 
+ */ +public final class SnifferBuilder { + public static final long DEFAULT_SNIFF_INTERVAL = TimeUnit.MINUTES.toMillis(5); + public static final long DEFAULT_SNIFF_AFTER_FAILURE_DELAY = TimeUnit.MINUTES.toMillis(1); + + private final RestClient restClient; + private long sniffIntervalMillis = DEFAULT_SNIFF_INTERVAL; + private long sniffAfterFailureDelayMillis = DEFAULT_SNIFF_AFTER_FAILURE_DELAY; + private HostsSniffer hostsSniffer; + + /** + * Creates a new builder instance by providing the {@link RestClient} that will be used to communicate with elasticsearch + */ + SnifferBuilder(RestClient restClient) { + Objects.requireNonNull(restClient, "restClient cannot be null"); + this.restClient = restClient; + } + + /** + * Sets the interval between consecutive ordinary sniff executions in milliseconds. Will be honoured when + * sniffOnFailure is disabled or when there are no failures between consecutive sniff executions. + * @throws IllegalArgumentException if sniffIntervalMillis is not greater than 0 + */ + public SnifferBuilder setSniffIntervalMillis(int sniffIntervalMillis) { + if (sniffIntervalMillis <= 0) { + throw new IllegalArgumentException("sniffIntervalMillis must be greater than 0"); + } + this.sniffIntervalMillis = sniffIntervalMillis; + return this; + } + + /** + * Sets the delay of a sniff execution scheduled after a failure (in milliseconds) + */ + public SnifferBuilder setSniffAfterFailureDelayMillis(int sniffAfterFailureDelayMillis) { + if (sniffAfterFailureDelayMillis <= 0) { + throw new IllegalArgumentException("sniffAfterFailureDelayMillis must be greater than 0"); + } + this.sniffAfterFailureDelayMillis = sniffAfterFailureDelayMillis; + return this; + } + + /** + * Sets the {@link HostsSniffer} to be used to read hosts. A default instance of {@link ElasticsearchHostsSniffer} + * is created when not provided. 
This method can be used to change the configuration of the {@link ElasticsearchHostsSniffer}, + * or to provide a different implementation (e.g. in case hosts need to taken from a different source). + */ + public SnifferBuilder setHostsSniffer(HostsSniffer hostsSniffer) { + Objects.requireNonNull(hostsSniffer, "hostsSniffer cannot be null"); + this.hostsSniffer = hostsSniffer; + return this; + } + + /** + * Creates the {@link Sniffer} based on the provided configuration. + */ + public Sniffer build() { + if (hostsSniffer == null) { + this.hostsSniffer = new ElasticsearchHostsSniffer(restClient); + } + return new Sniffer(restClient, hostsSniffer, sniffIntervalMillis, sniffAfterFailureDelayMillis); + } +} diff --git a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/HostsSnifferTests.java b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/ElasticsearchHostsSnifferTests.java similarity index 87% rename from client/sniffer/src/test/java/org/elasticsearch/client/sniff/HostsSnifferTests.java rename to client/sniffer/src/test/java/org/elasticsearch/client/sniff/ElasticsearchHostsSnifferTests.java index bb375834097..a926cabb87d 100644 --- a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/HostsSnifferTests.java +++ b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/ElasticsearchHostsSnifferTests.java @@ -60,17 +60,17 @@ import static org.junit.Assert.fail; //animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes @IgnoreJRERequirement -public class HostsSnifferTests extends RestClientTestCase { +public class ElasticsearchHostsSnifferTests extends RestClientTestCase { private int sniffRequestTimeout; - private HostsSniffer.Scheme scheme; + private ElasticsearchHostsSniffer.Scheme scheme; private SniffResponse sniffResponse; private HttpServer httpServer; @Before public void startHttpServer() throws IOException { this.sniffRequestTimeout = RandomInts.randomIntBetween(getRandom(), 1000, 10000); - this.scheme = 
RandomPicks.randomFrom(getRandom(), HostsSniffer.Scheme.values()); + this.scheme = RandomPicks.randomFrom(getRandom(), ElasticsearchHostsSniffer.Scheme.values()); if (rarely()) { this.sniffResponse = SniffResponse.buildFailure(); } else { @@ -85,14 +85,35 @@ public class HostsSnifferTests extends RestClientTestCase { httpServer.stop(0); } + public void testConstructorValidation() throws IOException { + try { + new ElasticsearchHostsSniffer(null, 1, ElasticsearchHostsSniffer.Scheme.HTTP); + fail("should have failed"); + } catch(NullPointerException e) { + assertEquals("restClient cannot be null", e.getMessage()); + } + HttpHost httpHost = new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort()); + try (RestClient restClient = RestClient.builder(httpHost).build()) { + try { + new ElasticsearchHostsSniffer(restClient, 1, null); + fail("should have failed"); + } catch (NullPointerException e) { + assertEquals(e.getMessage(), "scheme cannot be null"); + } + try { + new ElasticsearchHostsSniffer(restClient, RandomInts.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0), + ElasticsearchHostsSniffer.Scheme.HTTP); + fail("should have failed"); + } catch (IllegalArgumentException e) { + assertEquals(e.getMessage(), "sniffRequestTimeoutMillis must be greater than 0"); + } + } + } + public void testSniffNodes() throws IOException { HttpHost httpHost = new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort()); try (RestClient restClient = RestClient.builder(httpHost).build()) { - HostsSniffer.Builder builder = HostsSniffer.builder(restClient).setSniffRequestTimeoutMillis(sniffRequestTimeout); - if (scheme != HostsSniffer.Scheme.HTTP || randomBoolean()) { - builder.setScheme(scheme); - } - HostsSniffer sniffer = builder.build(); + ElasticsearchHostsSniffer sniffer = new ElasticsearchHostsSniffer(restClient, sniffRequestTimeout, scheme); try { List sniffedHosts = sniffer.sniffHosts(); if (sniffResponse.isFailure) { @@ 
-153,7 +174,7 @@ public class HostsSnifferTests extends RestClientTestCase { } } - private static SniffResponse buildSniffResponse(HostsSniffer.Scheme scheme) throws IOException { + private static SniffResponse buildSniffResponse(ElasticsearchHostsSniffer.Scheme scheme) throws IOException { int numNodes = RandomInts.randomIntBetween(getRandom(), 1, 5); List hosts = new ArrayList<>(numNodes); JsonFactory jsonFactory = new JsonFactory(); diff --git a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/HostsSnifferBuilderTests.java b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/HostsSnifferBuilderTests.java deleted file mode 100644 index c167a3a104b..00000000000 --- a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/HostsSnifferBuilderTests.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.client.sniff; - -import com.carrotsearch.randomizedtesting.generators.RandomInts; -import com.carrotsearch.randomizedtesting.generators.RandomPicks; -import org.apache.http.HttpHost; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestClientTestCase; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.fail; - -public class HostsSnifferBuilderTests extends RestClientTestCase { - - public void testBuild() throws Exception { - try { - HostsSniffer.builder(null); - fail("should have failed"); - } catch(NullPointerException e) { - assertEquals(e.getMessage(), "restClient cannot be null"); - } - - int numNodes = RandomInts.randomIntBetween(getRandom(), 1, 5); - HttpHost[] hosts = new HttpHost[numNodes]; - for (int i = 0; i < numNodes; i++) { - hosts[i] = new HttpHost("localhost", 9200 + i); - } - - try (RestClient client = RestClient.builder(hosts).build()) { - try { - HostsSniffer.builder(client).setScheme(null); - fail("should have failed"); - } catch(NullPointerException e) { - assertEquals(e.getMessage(), "scheme cannot be null"); - } - - try { - HostsSniffer.builder(client).setSniffRequestTimeoutMillis(RandomInts.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0)); - fail("should have failed"); - } catch(IllegalArgumentException e) { - assertEquals(e.getMessage(), "sniffRequestTimeoutMillis must be greater than 0"); - } - - HostsSniffer.Builder builder = HostsSniffer.builder(client); - if (getRandom().nextBoolean()) { - builder.setScheme(RandomPicks.randomFrom(getRandom(), HostsSniffer.Scheme.values())); - } - if (getRandom().nextBoolean()) { - builder.setSniffRequestTimeoutMillis(RandomInts.randomIntBetween(getRandom(), 1, Integer.MAX_VALUE)); - } - assertNotNull(builder.build()); - } - } -} diff --git a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/MockHostsSniffer.java 
b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/MockHostsSniffer.java index bdc052d07c8..5a52151d76e 100644 --- a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/MockHostsSniffer.java +++ b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/MockHostsSniffer.java @@ -22,18 +22,15 @@ package org.elasticsearch.client.sniff; import org.apache.http.HttpHost; import java.io.IOException; -import java.util.ArrayList; +import java.util.Collections; import java.util.List; -class MockHostsSniffer extends HostsSniffer { - MockHostsSniffer() { - super(null, -1, null); - } - +/** + * Mock implementation of {@link HostsSniffer}. Useful to prevent any connection attempt while testing builders etc. + */ +class MockHostsSniffer implements HostsSniffer { @Override public List sniffHosts() throws IOException { - List hosts = new ArrayList<>(); - hosts.add(new HttpHost("localhost", 9200)); - return hosts; + return Collections.singletonList(new HttpHost("localhost", 9200)); } } diff --git a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/SniffOnFailureListenerTests.java b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/SniffOnFailureListenerTests.java index bbb1de35663..1fece270ffe 100644 --- a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/SniffOnFailureListenerTests.java +++ b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/SniffOnFailureListenerTests.java @@ -46,7 +46,7 @@ public class SniffOnFailureListenerTests extends RestClientTestCase { } try (RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)).build()) { - try (Sniffer sniffer = Sniffer.builder(restClient, new MockHostsSniffer()).build()) { + try (Sniffer sniffer = Sniffer.builder(restClient).setHostsSniffer(new MockHostsSniffer()).build()) { listener.setSniffer(sniffer); try { listener.setSniffer(sniffer); diff --git a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/SnifferBuilderTests.java 
b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/SnifferBuilderTests.java index defa83554a4..b0c387d733a 100644 --- a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/SnifferBuilderTests.java +++ b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/SnifferBuilderTests.java @@ -37,50 +37,52 @@ public class SnifferBuilderTests extends RestClientTestCase { hosts[i] = new HttpHost("localhost", 9200 + i); } - HostsSniffer hostsSniffer = new MockHostsSniffer(); - try (RestClient client = RestClient.builder(hosts).build()) { try { - Sniffer.builder(null, hostsSniffer).build(); + Sniffer.builder(null).build(); fail("should have failed"); } catch(NullPointerException e) { assertEquals("restClient cannot be null", e.getMessage()); } try { - Sniffer.builder(client, null).build(); - fail("should have failed"); - } catch(NullPointerException e) { - assertEquals("hostsSniffer cannot be null", e.getMessage()); - } - - try { - Sniffer.builder(client, hostsSniffer) - .setSniffIntervalMillis(RandomInts.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0)); + Sniffer.builder(client).setSniffIntervalMillis(RandomInts.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0)); fail("should have failed"); } catch(IllegalArgumentException e) { assertEquals("sniffIntervalMillis must be greater than 0", e.getMessage()); } try { - Sniffer.builder(client, hostsSniffer) - .setSniffAfterFailureDelayMillis(RandomInts.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0)); + Sniffer.builder(client).setSniffAfterFailureDelayMillis(RandomInts.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0)); fail("should have failed"); } catch(IllegalArgumentException e) { assertEquals("sniffAfterFailureDelayMillis must be greater than 0", e.getMessage()); } - try (Sniffer sniffer = Sniffer.builder(client, hostsSniffer).build()) { + + try { + Sniffer.builder(client).setHostsSniffer(null); + fail("should have failed"); + } catch(NullPointerException e) { + assertEquals("hostsSniffer 
cannot be null", e.getMessage()); + } + + + try (Sniffer sniffer = Sniffer.builder(client).build()) { assertNotNull(sniffer); } - Sniffer.Builder builder = Sniffer.builder(client, hostsSniffer); + SnifferBuilder builder = Sniffer.builder(client); if (getRandom().nextBoolean()) { builder.setSniffIntervalMillis(RandomInts.randomIntBetween(getRandom(), 1, Integer.MAX_VALUE)); } if (getRandom().nextBoolean()) { builder.setSniffAfterFailureDelayMillis(RandomInts.randomIntBetween(getRandom(), 1, Integer.MAX_VALUE)); } + if (getRandom().nextBoolean()) { + builder.setHostsSniffer(new MockHostsSniffer()); + } + try (Sniffer sniffer = builder.build()) { assertNotNull(sniffer); } From a182e356d3e49f35e1197ea4f8a84be2747ebf2c Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 26 Jul 2016 11:35:17 -0400 Subject: [PATCH 76/93] Fix unit test build failure We didn't catch the failure because we tested against the fork instead of master. I think. --- .../java/org/elasticsearch/action/DocWriteResponseTests.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java b/core/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java index e652a670a67..7b2dac678fd 100644 --- a/core/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java @@ -19,12 +19,13 @@ package org.elasticsearch.action; +import org.elasticsearch.action.DocWriteResponse.Operation; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; public class DocWriteResponseTests extends ESTestCase { public void testGetLocation() { - DocWriteResponse response = new DocWriteResponse(new ShardId("index", "uuid", 0), "type", "id", 0) { + DocWriteResponse response = new DocWriteResponse(new ShardId("index", "uuid", 0), "type", "id", 0, Operation.CREATE) { // DocWriteResponse is abstract so we have to sneak a subclass in 
here to test it. }; assertEquals("/index/type/id", response.getLocation(null)); From 95ae58b53158916b4d9e4717f159696502d5ed85 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Tue, 26 Jul 2016 17:54:46 +0200 Subject: [PATCH 77/93] Set up project substitution for client:transport This ensures that projects depending on client:transport (such as client:benchmark) properly resolve this dependency. --- build.gradle | 1 + 1 file changed, 1 insertion(+) diff --git a/build.gradle b/build.gradle index c2bf6c9309f..f1b57d7857b 100644 --- a/build.gradle +++ b/build.gradle @@ -166,6 +166,7 @@ subprojects { "org.elasticsearch.client:rest:${version}": ':client:rest', "org.elasticsearch.client:sniffer:${version}": ':client:sniffer', "org.elasticsearch.client:test:${version}": ':client:test', + "org.elasticsearch.client:transport:${version}": ':client:transport', "org.elasticsearch.test:framework:${version}": ':test:framework', "org.elasticsearch.distribution.integ-test-zip:elasticsearch:${version}": ':distribution:integ-test-zip', "org.elasticsearch.distribution.zip:elasticsearch:${version}": ':distribution:zip', From 8151224883de898e7b76c343cd22b47ab076cf53 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 26 Jul 2016 18:01:22 +0200 Subject: [PATCH 78/93] add `Socket closed` variant to NetworkExceptionHelper.isCloseConnectionException --- .../elasticsearch/common/transport/NetworkExceptionHelper.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/common/transport/NetworkExceptionHelper.java b/core/src/main/java/org/elasticsearch/common/transport/NetworkExceptionHelper.java index 0317026b6be..5d6211c3fec 100644 --- a/core/src/main/java/org/elasticsearch/common/transport/NetworkExceptionHelper.java +++ b/core/src/main/java/org/elasticsearch/common/transport/NetworkExceptionHelper.java @@ -58,6 +58,9 @@ public class NetworkExceptionHelper { if (e.getMessage().equals("Socket is closed")) { return true; } + if 
(e.getMessage().equals("Socket closed")) { + return true; + } } return false; } From dbdb6341a5f954901064f071651f4e2c0b1e8c29 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 26 Jul 2016 18:01:50 +0200 Subject: [PATCH 79/93] increase logging information in testConcurrentSendRespondAndDisconnect --- .../transport/AbstractSimpleTransportTestCase.java | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index e7f659751b5..73855c13673 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.node.Node; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; @@ -127,7 +128,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { private MockTransportService buildService(final String name, final Version version) { MockTransportService service = build( Settings.builder() - .put("name", name) + .put(Node.NODE_NAME_SETTING.getKey(), name) .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") .build(), @@ -488,6 +489,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { assertThat(latch.await(5, TimeUnit.SECONDS), equalTo(true)); } + @TestLogging("transport:DEBUG,transport.tracer:TRACE") public 
void testConcurrentSendRespondAndDisconnect() throws BrokenBarrierException, InterruptedException { Set sendingErrors = ConcurrentCollections.newConcurrentSet(); Set responseErrors = ConcurrentCollections.newConcurrentSet(); @@ -505,7 +507,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { channel.sendResponse(new TestResponse()); } catch (Exception e) { // we don't really care what's going on B, we're testing through A - logger.trace("caught exception while res ponding from node B", e); + logger.trace("caught exception while responding from node B", e); } }; serviceB.registerRequestHandler("test", TestRequest::new, ThreadPool.Names.SAME, ignoringRequestHandler); @@ -586,10 +588,10 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { if (i % 3 == 0) { // simulate restart of nodeB serviceB.close(); - MockTransportService newService = buildService("TS_B", version1); + MockTransportService newService = buildService("TS_B_" + i, version1); newService.registerRequestHandler("test", TestRequest::new, ThreadPool.Names.SAME, ignoringRequestHandler); serviceB = newService; - nodeB = new DiscoveryNode("TS_B", serviceB.boundAddress().publishAddress(), emptyMap(), emptySet(), version1); + nodeB = new DiscoveryNode("TS_B_" + i, "TS_B", serviceB.boundAddress().publishAddress(), emptyMap(), emptySet(), version1); serviceB.connectToNode(nodeA); serviceA.connectToNode(nodeB); } else if (serviceA.nodeConnected(nodeB)) { @@ -786,7 +788,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { assertTrue(inFlight.tryAcquire(Integer.MAX_VALUE, 10, TimeUnit.SECONDS)); } - @TestLogging(value = "test. 
transport.tracer:TRACE") + @TestLogging(value = "test.transport.tracer:TRACE") public void testTracerLog() throws InterruptedException { TransportRequestHandler handler = new TransportRequestHandler() { @Override From fabfd425f087c8a6e35c552c531d7629fbeeb910 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 26 Jul 2016 18:04:05 +0200 Subject: [PATCH 80/93] remove socket timeout from MockTcpTransport added in b208a7dbaeeb269e6d1e121f46a29cb6b0f8004f --- .../main/java/org/elasticsearch/transport/MockTcpTransport.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java index 8a0828794da..c128ee49022 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java @@ -43,7 +43,6 @@ import java.io.BufferedOutputStream; import java.io.Closeable; import java.io.IOException; import java.io.OutputStream; -import java.io.UncheckedIOException; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.Socket; @@ -210,7 +209,6 @@ public class MockTcpTransport extends TcpTransport private void configureSocket(Socket socket) throws SocketException { socket.setTcpNoDelay(TCP_NO_DELAY.get(settings)); - socket.setSoTimeout(15000); ByteSizeValue tcpSendBufferSize = TCP_SEND_BUFFER_SIZE.get(settings); if (tcpSendBufferSize.bytes() > 0) { socket.setSendBufferSize(tcpSendBufferSize.bytesAsInt()); From 3c0288ee987578dc8520e5ea15a02ed60b22dd9a Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 22 Jul 2016 18:51:36 -0400 Subject: [PATCH 81/93] Consolify term and phrase suggester docs This includes a working example of reverse filters to support correcting prefix errors. 
--- docs/build.gradle | 7 + docs/reference/indices/flush.asciidoc | 10 +- docs/reference/search/suggesters.asciidoc | 237 +++++------------- .../search/suggesters/phrase-suggest.asciidoc | 236 +++++++++-------- docs/reference/search/uri-request.asciidoc | 8 +- 5 files changed, 209 insertions(+), 289 deletions(-) diff --git a/docs/build.gradle b/docs/build.gradle index 26560ce064a..5459c9a754f 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -63,6 +63,13 @@ buildRestTests.docs = fileTree(projectDir) { Closure setupTwitter = { String name, int count -> buildRestTests.setups[name] = ''' + - do: + indices.create: + index: twitter + body: + settings: + number_of_shards: 1 + number_of_replicas: 1 - do: bulk: index: twitter diff --git a/docs/reference/indices/flush.asciidoc b/docs/reference/indices/flush.asciidoc index acf2b7c1a6a..5864c16d4c1 100644 --- a/docs/reference/indices/flush.asciidoc +++ b/docs/reference/indices/flush.asciidoc @@ -153,18 +153,18 @@ sync-flushed: -------------------------------------------------- { "_shards": { - "total": 10, - "successful": 10, + "total": 2, + "successful": 2, "failed": 0 }, "twitter": { - "total": 10, - "successful": 10, + "total": 2, + "successful": 2, "failed": 0 } } -------------------------------------------------- -// TESTRESPONSE[s/"successful": 10/"successful": 5/] +// TESTRESPONSE[s/"successful": 2/"successful": 1/] Here is what it looks like when one shard group failed due to pending operations: diff --git a/docs/reference/search/suggesters.asciidoc b/docs/reference/search/suggesters.asciidoc index 71a62262338..2da4a607307 100644 --- a/docs/reference/search/suggesters.asciidoc +++ b/docs/reference/search/suggesters.asciidoc @@ -10,15 +10,25 @@ The suggest request part is either defined alongside the query part in a [source,js] -------------------------------------------------- -curl -s -XPOST 'localhost:9200/_search' -d '{ +POST twitter/_search +{ "query" : { - ... 
+ "match": { + "message": "tring out Elasticsearch" + } }, "suggest" : { - ... + "my-suggestion" : { + "text" : "trying out Elasticsearch", + "term" : { + "field" : "message" + } + } } -}' +} -------------------------------------------------- +// CONSOLE +// TEST[setup:twitter] Suggest requests executed against the `_suggest` endpoint should omit the surrounding `suggest` element which is only used if the suggest @@ -26,15 +36,18 @@ request is part of a search. [source,js] -------------------------------------------------- -curl -XPOST 'localhost:9200/_suggest' -d '{ +POST _suggest +{ "my-suggestion" : { - "text" : "the amsterdma meetpu", + "text" : "tring out Elasticsearch", "term" : { - "field" : "body" + "field" : "message" } } -}' +} -------------------------------------------------- +// CONSOLE +// TEST[setup:twitter] Several suggestions can be specified per request. Each suggestion is identified with an arbitrary name. In the example below two suggestions @@ -43,21 +56,24 @@ the `term` suggester, but have a different `text`. [source,js] -------------------------------------------------- -"suggest" : { +POST _suggest +{ "my-suggest-1" : { - "text" : "the amsterdma meetpu", + "text" : "tring out Elasticsearch", "term" : { - "field" : "body" + "field" : "message" } }, "my-suggest-2" : { - "text" : "the rottredam meetpu", + "text" : "kmichy", "term" : { - "field" : "title" + "field" : "user" } } } -------------------------------------------------- +// CONSOLE +// TEST[setup:twitter] The below suggest response example includes the suggestion response for `my-suggest-1` and `my-suggest-2`. Each suggestion part contains @@ -68,44 +84,35 @@ in the suggest text and if found an arbitrary number of options. [source,js] -------------------------------------------------- { - ... - "suggest": { - "my-suggest-1": [ - { - "text" : "amsterdma", - "offset": 4, - "length": 9, - "options": [ - ... - ] - }, - ... - ], - "my-suggest-2" : [ - ... - ] - } - ... + "_shards": ... 
+ "my-suggest-1": [ { + "text": "tring", + "offset": 0, + "length": 5, + "options": [ {"text": "trying", "score": 0.8, "freq": 1 } ] + }, { + "text": "out", + "offset": 6, + "length": 3, + "options": [] + }, { + "text": "elasticsearch", + "offset": 10, + "length": 13, + "options": [] + } ], + "my-suggest-2": ... } -------------------------------------------------- +// TESTRESPONSE[s/"_shards": \.\.\./"_shards": "$body._shards",/] +// TESTRESPONSE[s/"my-suggest-2": \.\.\./"my-suggest-2": "$body.my-suggest-2"/] + Each options array contains an option object that includes the suggested text, its document frequency and score compared to the suggest entry text. The meaning of the score depends on the used suggester. The term suggester's score is based on the edit distance. -[source,js] --------------------------------------------------- -"options": [ - { - "text": "amsterdam", - "freq": 77, - "score": 0.8888889 - }, - ... -] --------------------------------------------------- - [float] [[global-suggest]] === Global suggest text @@ -116,157 +123,27 @@ and applies to the `my-suggest-1` and `my-suggest-2` suggestions. [source,js] -------------------------------------------------- -"suggest" : { - "text" : "the amsterdma meetpu", +POST _suggest +{ + "text" : "tring out Elasticsearch", "my-suggest-1" : { "term" : { - "field" : "title" + "field" : "message" } }, "my-suggest-2" : { "term" : { - "field" : "body" + "field" : "user" } } } -------------------------------------------------- +// CONSOLE The suggest text can in the above example also be specified as suggestion specific option. The suggest text specified on suggestion level override the suggest text on the global level. -[float] -=== Other suggest example - -In the below example we request suggestions for the following suggest -text: `devloping distibutd saerch engies` on the `title` field with a -maximum of 3 suggestions per term inside the suggest text. Note that in -this example we set `size` to `0`. 
This isn't required, but a -nice optimization. The suggestions are gathered in the `query` phase and -in the case that we only care about suggestions (so no hits) we don't -need to execute the `fetch` phase. - -[source,js] --------------------------------------------------- -curl -s -XPOST 'localhost:9200/_search' -d '{ - "size": 0, - "suggest" : { - "my-title-suggestions-1" : { - "text" : "devloping distibutd saerch engies", - "term" : { - "size" : 3, - "field" : "title" - } - } - } -}' --------------------------------------------------- - -The above request could yield the response as stated in the code example -below. As you can see if we take the first suggested options of each -suggestion entry we get `developing distributed search engines` as -result. - -[source,js] --------------------------------------------------- -{ - ... - "suggest": { - "my-title-suggestions-1": [ - { - "text": "devloping", - "offset": 0, - "length": 9, - "options": [ - { - "text": "developing", - "freq": 77, - "score": 0.8888889 - }, - { - "text": "deloping", - "freq": 1, - "score": 0.875 - }, - { - "text": "deploying", - "freq": 2, - "score": 0.7777778 - } - ] - }, - { - "text": "distibutd", - "offset": 10, - "length": 9, - "options": [ - { - "text": "distributed", - "freq": 217, - "score": 0.7777778 - }, - { - "text": "disributed", - "freq": 1, - "score": 0.7777778 - }, - { - "text": "distribute", - "freq": 1, - "score": 0.7777778 - } - ] - }, - { - "text": "saerch", - "offset": 20, - "length": 6, - "options": [ - { - "text": "search", - "freq": 1038, - "score": 0.8333333 - }, - { - "text": "smerch", - "freq": 3, - "score": 0.8333333 - }, - { - "text": "serch", - "freq": 2, - "score": 0.8 - } - ] - }, - { - "text": "engies", - "offset": 27, - "length": 6, - "options": [ - { - "text": "engines", - "freq": 568, - "score": 0.8333333 - }, - { - "text": "engles", - "freq": 3, - "score": 0.8333333 - }, - { - "text": "eggies", - "freq": 1, - "score": 0.8333333 - } - ] - } - ] - } - ... 
-} --------------------------------------------------- - include::suggesters/term-suggest.asciidoc[] include::suggesters/phrase-suggest.asciidoc[] @@ -274,5 +151,3 @@ include::suggesters/phrase-suggest.asciidoc[] include::suggesters/completion-suggest.asciidoc[] include::suggesters/context-suggest.asciidoc[] - - diff --git a/docs/reference/search/suggesters/phrase-suggest.asciidoc b/docs/reference/search/suggesters/phrase-suggest.asciidoc index 7ba1c93540b..487075a5677 100644 --- a/docs/reference/search/suggesters/phrase-suggest.asciidoc +++ b/docs/reference/search/suggesters/phrase-suggest.asciidoc @@ -17,36 +17,94 @@ co-occurrence and frequencies. ==== API Example -The `phrase` request is defined along side the query part in the json -request: +In general the `phrase` suggester requires special mapping up front to work. +The `phrase` suggester examples on this page need the following mapping to +work. The `reverse` analyzer is used only in the last example. [source,js] -------------------------------------------------- -curl -XPOST 'localhost:9200/_search' -d '{ - "suggest" : { - "text" : "Xor the Got-Jewel", - "simple_phrase" : { - "phrase" : { - "analyzer" : "body", - "field" : "bigram", - "size" : 1, - "real_word_error_likelihood" : 0.95, - "max_errors" : 0.5, - "gram_size" : 2, - "direct_generator" : [ { - "field" : "body", - "suggest_mode" : "always", - "min_word_length" : 1 - } ], - "highlight": { - "pre_tag": "", - "post_tag": "" +POST test +{ + "settings": { + "index": { + "number_of_shards": 1, + "analysis": { + "analyzer": { + "trigram": { + "type": "custom", + "tokenizer": "standard", + "filter": ["standard", "shingle"] + }, + "reverse": { + "type": "custom", + "tokenizer": "standard", + "filter": ["standard", "reverse"] + } + }, + "filter": { + "shingle": { + "type": "shingle", + "min_shingle_size": 2, + "max_shingle_size": 3 + } + } + } + } + }, + "mappings": { + "test": { + "properties": { + "title": { + "type": "text", + "fields": { + "trigram": { 
+ "type": "text", + "analyzer": "trigram" + }, + "reverse": { + "type": "text", + "analyzer": "reverse" + } + } } } } } -}' +} +POST test/test +{"title": "noble warriors"} +POST test/test +{"title": "nobel prize"} +POST _refresh -------------------------------------------------- +// TESTSETUP + +Once you have the analyzers and mappings set up you can use the `phrase` +suggester in the same spot you'd use the `term` suggester: + +[source,js] +-------------------------------------------------- +POST _suggest?pretty -d' +{ + "text": "noble prize", + "simple_phrase": { + "phrase": { + "field": "title.trigram", + "size": 1, + "gram_size": 3, + "direct_generator": [ { + "field": "title.trigram", + "suggest_mode": "always" + } ], + "highlight": { + "pre_tag": "", + "post_tag": "" + } + } + } +} +-------------------------------------------------- +// CONSOLE The response contains suggestions scored by the most likely spell correction first. In this case we received the expected correction @@ -57,37 +115,23 @@ can contain misspellings (See parameter descriptions below). [source,js] -------------------------------------------------- - { - "took" : 5, - "timed_out" : false, - "_shards" : { - "total" : 5, - "successful" : 5, - "failed" : 0 - }, - "hits" : { - "total" : 2938, - "max_score" : 0.0, - "hits" : [ ] - }, - "suggest" : { - "simple_phrase" : [ { - "text" : "Xor the Got-Jewel", +{ + "_shards": ... 
+ "simple_phrase" : [ + { + "text" : "noble prize", "offset" : 0, - "length" : 17, + "length" : 11, "options" : [ { - "text" : "xorr the god jewel", - "highlighted": "xorr the god jewel", - "score" : 0.17877324 - }, { - "text" : "xor the god jewel", - "highlighted": "xor the god jewel", - "score" : 0.14231323 - } ] - } ] - } + "text" : "nobel prize", + "highlighted": "nobel prize", + "score" : 0.40765354 + }] + } + ] } -------------------------------------------------- +// TESTRESPONSE[s/"_shards": .../"_shards": "$body._shards",/] ==== Basic Phrase suggest API parameters @@ -178,34 +222,34 @@ can contain misspellings (See parameter descriptions below). [source,js] -------------------------------------------------- -curl -XPOST 'localhost:9200/_search' -d { - "suggest" : { - "text" : "Xor the Got-Jewel", - "simple_phrase" : { - "phrase" : { - "field" : "bigram", - "size" : 1, - "direct_generator" : [ { - "field" : "body", - "suggest_mode" : "always", - "min_word_length" : 1 - } ], - "collate": { - "query": { <1> - "inline" : { - "match": { - "{{field_name}}" : "{{suggestion}}" <2> - } - } - }, - "params": {"field_name" : "title"}, <3> - "prune": true <4> - } - } - } - } - } +POST _suggest +{ + "text" : "noble prize", + "simple_phrase" : { + "phrase" : { + "field" : "title.trigram", + "size" : 1, + "direct_generator" : [ { + "field" : "title.trigram", + "suggest_mode" : "always", + "min_word_length" : 1 + } ], + "collate": { + "query": { <1> + "inline" : { + "match": { + "{{field_name}}" : "{{suggestion}}" <2> + } + } + }, + "params": {"field_name" : "title"}, <3> + "prune": true <4> + } + } + } +} -------------------------------------------------- +// CONSOLE <1> This query will be run once for every suggestion. <2> The `{{suggestion}}` variable will be replaced by the text of each suggestion. @@ -342,33 +386,27 @@ accept ordinary analyzer names. 
[source,js] -------------------------------------------------- -curl -s -XPOST 'localhost:9200/_search' -d { - "suggest" : { - "text" : "Xor the Got-Jewel", - "simple_phrase" : { - "phrase" : { - "analyzer" : "body", - "field" : "bigram", - "size" : 4, - "real_word_error_likelihood" : 0.95, - "confidence" : 2.0, - "gram_size" : 2, - "direct_generator" : [ { - "field" : "body", - "suggest_mode" : "always", - "min_word_length" : 1 - }, { - "field" : "reverse", - "suggest_mode" : "always", - "min_word_length" : 1, - "pre_filter" : "reverse", - "post_filter" : "reverse" - } ] - } +POST _suggest +{ + "text" : "obel prize", + "simple_phrase" : { + "phrase" : { + "field" : "title.trigram", + "size" : 1, + "direct_generator" : [ { + "field" : "title.trigram", + "suggest_mode" : "always" + }, { + "field" : "title.reverse", + "suggest_mode" : "always", + "pre_filter" : "reverse", + "post_filter" : "reverse" + } ] } } } -------------------------------------------------- +// CONSOLE `pre_filter` and `post_filter` can also be used to inject synonyms after candidates are generated. 
For instance for the query `captain usq` we diff --git a/docs/reference/search/uri-request.asciidoc b/docs/reference/search/uri-request.asciidoc index ba36992f6fb..95ce6a8ff6a 100644 --- a/docs/reference/search/uri-request.asciidoc +++ b/docs/reference/search/uri-request.asciidoc @@ -21,19 +21,19 @@ And here is a sample response: "timed_out": false, "took": 62, "_shards":{ - "total" : 5, - "successful" : 5, + "total" : 1, + "successful" : 1, "failed" : 0 }, "hits":{ "total" : 1, - "max_score": 0.2876821, + "max_score": 1.3862944, "hits" : [ { "_index" : "twitter", "_type" : "tweet", "_id" : "0", - "_score": 0.2876821, + "_score": 1.3862944, "_source" : { "user" : "kimchy", "date" : "2009-11-15T14:12:12", From 0553ba9151dd5882391d076216b9363679a64949 Mon Sep 17 00:00:00 2001 From: Chris Earle Date: Tue, 26 Jul 2016 13:14:27 -0400 Subject: [PATCH 82/93] [Ingest] Add REST _ingest/pipeline to get all pipelines This adds an extra REST handler for "_ingest/pipeline" so that users do not need to supply "_ingest/pipeline/*" to get all of them. - Also adds a teardown section to related REST-tests for ingest. 
--- .../action/ingest/GetPipelineRequest.java | 9 ++-- .../elasticsearch/ingest/PipelineStore.java | 5 ++ .../action/ingest/RestGetPipelineAction.java | 1 + .../ingest/PipelineStoreTests.java | 16 ++++-- .../ingest/100_date_index_name_processor.yaml | 7 +++ .../rest-api-spec/test/ingest/110_sort.yaml | 7 +++ .../rest-api-spec/test/ingest/120_grok.yaml | 7 +++ .../rest-api-spec/test/ingest/20_crud.yaml | 52 +++++++++++++++++++ .../test/ingest/30_date_processor.yaml | 7 +++ .../rest-api-spec/test/ingest/40_mutate.yaml | 7 +++ .../test/ingest/50_on_failure.yaml | 7 +++ .../rest-api-spec/test/ingest/60_fail.yaml | 7 +++ .../rest-api-spec/test/ingest/70_bulk.yaml | 11 ++++ .../rest-api-spec/test/ingest/80_foreach.yaml | 7 +++ .../test/ingest/90_simulate.yaml | 7 +++ .../api/ingest.get_pipeline.json | 2 +- 16 files changed, 150 insertions(+), 9 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java index 6525c26c6c6..1ba22bce805 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java @@ -21,26 +21,25 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.action.ValidateActions.addValidationError; public class GetPipelineRequest extends MasterNodeReadRequest { private String[] ids; public GetPipelineRequest(String... 
ids) { - if (ids == null || ids.length == 0) { - throw new IllegalArgumentException("No ids specified"); + if (ids == null) { + throw new IllegalArgumentException("ids cannot be null"); } this.ids = ids; } GetPipelineRequest() { + this.ids = Strings.EMPTY_ARRAY; } public String[] getIds() { diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java index 50ae19a2ca1..713001c233e 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java @@ -207,6 +207,11 @@ public class PipelineStore extends AbstractComponent implements ClusterStateList return Collections.emptyList(); } + // if we didn't ask for _any_ ID, then we get them all (this is the same as if they ask for '*') + if (ids.length == 0) { + return new ArrayList<>(ingestMetadata.getPipelines().values()); + } + List result = new ArrayList<>(ids.length); for (String id : ids) { if (Regex.isSimpleMatchPattern(id)) { diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java index 55f14cfaa84..2b7adedcacf 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java @@ -35,6 +35,7 @@ public class RestGetPipelineAction extends BaseRestHandler { @Inject public RestGetPipelineAction(Settings settings, RestController controller) { super(settings); + controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline", this); controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{id}", this); } diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java index 1510d25b695..29032ae4326 100644 --- 
a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java @@ -28,11 +28,9 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.LocalTransportAddress; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -48,7 +46,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -import static org.mockito.Mockito.mock; public class PipelineStoreTests extends ESTestCase { @@ -216,6 +213,19 @@ public class PipelineStoreTests extends ESTestCase { assertThat(pipelines.size(), equalTo(2)); assertThat(pipelines.get(0).getId(), equalTo("_id1")); assertThat(pipelines.get(1).getId(), equalTo("_id2")); + + // get all variants: (no IDs or '*') + pipelines = store.innerGetPipelines(ingestMetadata); + pipelines.sort((o1, o2) -> o1.getId().compareTo(o2.getId())); + assertThat(pipelines.size(), equalTo(2)); + assertThat(pipelines.get(0).getId(), equalTo("_id1")); + assertThat(pipelines.get(1).getId(), equalTo("_id2")); + + pipelines = store.innerGetPipelines(ingestMetadata, "*"); + pipelines.sort((o1, o2) -> o1.getId().compareTo(o2.getId())); + assertThat(pipelines.size(), equalTo(2)); + assertThat(pipelines.get(0).getId(), equalTo("_id1")); + assertThat(pipelines.get(1).getId(), equalTo("_id2")); } public void testCrud() throws Exception { diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/100_date_index_name_processor.yaml 
b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/100_date_index_name_processor.yaml index c99c1025c10..ccf83cc96bf 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/100_date_index_name_processor.yaml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/100_date_index_name_processor.yaml @@ -1,3 +1,10 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "1" + ignore: 404 + --- "Test date index name processor with defaults": - do: diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/110_sort.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/110_sort.yaml index 096e57fa4e1..e18a0dcd252 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/110_sort.yaml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/110_sort.yaml @@ -1,3 +1,10 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "my_pipeline" + ignore: 404 + --- "Test sort Processor": - do: diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/120_grok.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/120_grok.yaml index a51f5a102b0..0d77ccce61c 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/120_grok.yaml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/120_grok.yaml @@ -1,3 +1,10 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "my_pipeline" + ignore: 404 + --- "Test Grok Pipeline": - do: diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml index b494161aff1..2fb6ac230ff 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/20_crud.yaml @@ -1,3 +1,18 @@ +--- +teardown: + - do: + 
ingest.delete_pipeline: + id: "my_pipeline" + ignore: 404 + - do: + ingest.delete_pipeline: + id: "first_pipeline" + ignore: 404 + - do: + ingest.delete_pipeline: + id: "second_pipeline" + ignore: 404 + --- "Test basic pipeline crud": - do: @@ -23,6 +38,11 @@ - match: { pipelines.0.id: "my_pipeline" } - match: { pipelines.0.config.description: "_description" } + - do: + ingest.get_pipeline: {} + - match: { pipelines.0.id: "my_pipeline" } + - match: { pipelines.0.config.description: "_description" } + - do: ingest.delete_pipeline: id: "my_pipeline" @@ -33,6 +53,38 @@ ingest.get_pipeline: id: "my_pipeline" +--- +"Test Get All Pipelines (unordered)": + - do: + ingest.put_pipeline: + id: "first_pipeline" + body: > + { + "description": "first", + "processors": [ + { + "set" : { + "field" : "field1", + "value": "_value" + } + } + ] + } + - do: + ingest.put_pipeline: + id: "second_pipeline" + body: > + { + "description": "second", + "processors": [] + } + + # Order is not guaranteed by the response, so we check for length instead; above tests that we get appropriate values + - do: + ingest.get_pipeline: {} + - length: { pipelines: 2 } + + --- "Test invalid config": - do: diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/30_date_processor.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/30_date_processor.yaml index 33e25e4989b..93dfa52196f 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/30_date_processor.yaml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/30_date_processor.yaml @@ -1,3 +1,10 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "my_pipeline" + ignore: 404 + --- "Test date processor": - do: diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/40_mutate.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/40_mutate.yaml index c5d27f720cb..8150891ebd0 100644 --- 
a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/40_mutate.yaml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/40_mutate.yaml @@ -1,3 +1,10 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "my_pipeline" + ignore: 404 + --- "Test mutate processors": - do: diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/50_on_failure.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/50_on_failure.yaml index a4395427e04..53c1a9a7923 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/50_on_failure.yaml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/50_on_failure.yaml @@ -1,3 +1,10 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "my_pipeline" + ignore: 404 + --- "Test Pipeline With On Failure Block": - do: diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/60_fail.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/60_fail.yaml index 019c229ae38..e080991e13c 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/60_fail.yaml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/60_fail.yaml @@ -1,3 +1,10 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "my_pipeline" + ignore: 404 + --- "Test Fail Processor": - do: diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/70_bulk.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/70_bulk.yaml index d834d86b2ea..cf428b32524 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/70_bulk.yaml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/70_bulk.yaml @@ -31,6 +31,17 @@ setup: ] } +--- +teardown: + - do: + ingest.delete_pipeline: + id: "pipeline1" + ignore: 404 + - do: + ingest.delete_pipeline: + id: "pipeline2" + ignore: 404 + --- "Test bulk request without default 
pipeline": diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/80_foreach.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/80_foreach.yaml index 2ebfc089396..48080433fd2 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/80_foreach.yaml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/80_foreach.yaml @@ -1,3 +1,10 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "my_pipeline" + ignore: 404 + --- "Test foreach Processor": - do: diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yaml index 7ce30ee3bde..8b08535c124 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yaml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yaml @@ -1,3 +1,10 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "my_pipeline" + ignore: 404 + --- "Test simulate with stored ingest pipeline": - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_pipeline.json index 6c50657ae1a..5574ddbf5b7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_pipeline.json @@ -4,7 +4,7 @@ "methods": [ "GET" ], "url": { "path": "/_ingest/pipeline/{id}", - "paths": [ "/_ingest/pipeline/{id}" ], + "paths": [ "/_ingest/pipeline", "/_ingest/pipeline/{id}" ], "parts": { "id": { "type" : "string", From 9270e8b22b2df5ae09726464419c179e18d9a3a7 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 25 Jul 2016 18:58:44 -0400 Subject: [PATCH 83/93] Rename client yaml test infrastructure This makes it obvious that these tests are for running the client yaml suites. 
Now that there are other ways of running tests using the REST client against a running cluster we can't go on calling the shared client yaml tests "REST tests". They are rest tests, but they aren't **the** rest tests. --- .../ExceptionSerializationTests.java | 2 +- ...tIT.java => DebClientYamlTestSuiteIT.java} | 11 +-- .../IntegTestZipClientYamlTestSuiteIT.java | 40 +++++++++++ .../test/rest/RpmClientYamlTestSuiteIT.java} | 11 +-- .../test/rest/TarClientYamlTestSuiteIT.java} | 11 +-- .../org/elasticsearch/test/rest/RestIT.java | 37 ---------- .../test/rest/ZipClientYamlTestSuiteIT.java} | 11 +-- ...IT.java => DocsClientYamlTestSuiteIT.java} | 12 ++-- ... => MatrixStatsClientYamlTestSuiteIT.java} | 16 ++--- ...=> IngestCommonClientYamlTestSuiteIT.java} | 13 ++-- ... LangExpressionClientYamlTestSuiteIT.java} | 13 ++-- ...a => LangGroovyClientYamlTestSuiteIT.java} | 13 ++-- ...=> LangMustacheClientYamlTestSuiteIT.java} | 13 ++-- ...=> LangPainlessClientYamlTestSuiteIT.java} | 13 ++-- ...a => PercolatorClientYamlTestSuiteIT.java} | 13 ++-- ...java => ReindexClientYamlTestSuiteIT.java} | 12 ++-- ....java => Netty3ClientYamlTestSuiteIT.java} | 12 ++-- ....java => Netty4ClientYamlTestSuiteIT.java} | 14 ++-- ...tIT.java => IcuClientYamlTestSuiteIT.java} | 13 ++-- .../KuromojiClientYamlTestSuiteIT.java} | 13 ++-- .../PhoneticClientYamlTestSuiteIT.java | 42 ++++++++++++ .../SmartCNClientYamlTestSuiteIT.java} | 13 ++-- .../index/analysis/AnalysisPolishRestIT.java | 41 ------------ .../StempelClientYamlTestSuiteIT.java} | 13 ++-- ...eryAzureClassicClientYamlTestSuiteIT.java} | 13 ++-- ...ava => CloudAwsClientYamlTestSuiteIT.java} | 13 ++-- ...=> DiscoveryGceClientYamlTestSuiteIT.java} | 13 ++-- ...ngestAttachmentClientYamlTestSuiteIT.java} | 13 ++-- ... => IngestGeoIpClientYamlTestSuiteIT.java} | 15 ++--- ...IngestUserAgentClientYamlTestSuiteIT.java} | 13 ++-- ...a => JvmExampleClientYamlTestSuiteIT.java} | 13 ++-- ... 
LangJavascriptClientYamlTestSuiteIT.java} | 13 ++-- ...a => LangPythonClientYamlTestSuiteIT.java} | 13 ++-- ...pperAttachmentsClientYamlTestSuiteIT.java} | 13 ++-- ...> MapperMurmur3ClientYamlTestSuiteIT.java} | 13 ++-- ...a => MapperSizeClientYamlTestSuiteIT.java} | 13 ++-- ...RepositoryAzureClientYamlTestSuiteIT.java} | 13 ++-- ...> RepositoryGcsClientYamlTestSuiteIT.java} | 13 ++-- ... RepositoryHdfsClientYamlTestSuiteIT.java} | 13 ++-- ...=> RepositoryS3ClientYamlTestSuiteIT.java} | 13 ++-- ...ava => StoreSmbClientYamlTestSuiteIT.java} | 13 ++-- ... => Backwards50ClientYamlTestSuiteIT.java} | 12 ++-- ...tIngestDisabledClientYamlTestSuiteIT.java} | 15 +++-- ...gestWithAllDepsClientYamlTestSuiteIT.java} | 15 +++-- ...keTestMultiNodeClientYamlTestSuiteIT.java} | 15 +++-- ...mokeTestPluginsClientYamlTestSuiteIT.java} | 15 +++-- ...ndexWithPainlessClientYamlTestSuiteIT.java | 40 +++++++++++ .../SmokeTestReindexWithPainlessIT.java | 39 ----------- .../junit/listeners/ReproduceInfoPrinter.java | 8 +-- .../BlacklistedPathPatternMatcher.java | 2 +- .../ClientYamlTestCandidate.java} | 23 +++---- .../ClientYamlTestExecutionContext.java} | 30 ++++----- .../{ => yaml}/ESClientYamlSuiteTestCase.java | 67 ++++++++++--------- .../test/rest/{ => yaml}/ObjectPath.java | 2 +- .../test/rest/{ => yaml}/Stash.java | 2 +- .../client/ClientYamlTestClient.java} | 46 +++++++------ .../client/ClientYamlTestResponse.java} | 10 +-- .../ClientYamlTestResponseException.java} | 12 ++-- .../parser/ClientYamlTestFragmentParser.java} | 8 +-- .../parser/ClientYamlTestParseException.java} | 8 +-- .../parser/ClientYamlTestSectionParser.java} | 12 ++-- .../ClientYamlTestSuiteParseContext.java} | 50 +++++++------- .../parser/ClientYamlTestSuiteParser.java} | 25 ++++--- .../{ => yaml}/parser/DoSectionParser.java | 12 ++-- .../parser/GreaterThanEqualToParser.java | 12 ++-- .../{ => yaml}/parser/GreaterThanParser.java | 10 +-- .../rest/{ => yaml}/parser/IsFalseParser.java | 8 +-- .../rest/{ => 
yaml}/parser/IsTrueParser.java | 8 +-- .../rest/{ => yaml}/parser/LengthParser.java | 10 +-- .../parser/LessThanOrEqualToParser.java | 10 +-- .../{ => yaml}/parser/LessThanParser.java | 10 +-- .../rest/{ => yaml}/parser/MatchParser.java | 8 +-- .../{ => yaml}/parser/SetSectionParser.java | 10 +-- .../{ => yaml}/parser/SetupSectionParser.java | 10 +-- .../{ => yaml}/parser/SkipSectionParser.java | 16 ++--- .../parser/TeardownSectionParser.java | 10 +-- .../restspec/ClientYamlSuiteRestApi.java} | 43 ++++++------ .../ClientYamlSuiteRestApiParser.java} | 10 +-- .../restspec/ClientYamlSuiteRestPath.java} | 12 ++-- .../restspec/ClientYamlSuiteRestSpec.java} | 28 ++++---- .../{ => yaml}/section/ApiCallSection.java | 2 +- .../rest/{ => yaml}/section/Assertion.java | 10 +-- .../section/ClientYamlTestSection.java} | 10 +-- .../section/ClientYamlTestSuite.java} | 14 ++-- .../rest/{ => yaml}/section/DoSection.java | 20 +++--- .../{ => yaml}/section/ExecutableSection.java | 6 +- .../section/GreaterThanAssertion.java | 2 +- .../section/GreaterThanEqualToAssertion.java | 2 +- .../{ => yaml}/section/IsFalseAssertion.java | 2 +- .../{ => yaml}/section/IsTrueAssertion.java | 2 +- .../{ => yaml}/section/LengthAssertion.java | 2 +- .../{ => yaml}/section/LessThanAssertion.java | 2 +- .../section/LessThanOrEqualToAssertion.java | 2 +- .../{ => yaml}/section/MatchAssertion.java | 2 +- .../rest/{ => yaml}/section/SetSection.java | 6 +- .../rest/{ => yaml}/section/SetupSection.java | 2 +- .../rest/{ => yaml}/section/SkipSection.java | 4 +- .../{ => yaml}/section/TeardownSection.java | 2 +- .../rest/{ => yaml}/support/Features.java | 2 +- .../rest/{ => yaml}/support/FileUtils.java | 2 +- .../BlacklistedPathPatternMatcherTests.java | 3 +- .../rest/{test => yaml}/ObjectPathTests.java | 6 +- .../test/rest/{test => yaml}/StashTests.java | 4 +- .../parser}/AbstractParserTestCase.java | 2 +- .../parser}/AssertionParsersTests.java | 46 ++++++------- .../ClientYamlSuiteTestParserTests.java} | 36 
+++++----- .../parser}/DoSectionParserTests.java | 44 ++++++------ .../parser}/SetSectionParserTests.java | 18 ++--- .../parser}/SetupSectionParserTests.java | 12 ++-- .../parser}/SkipSectionParserTests.java | 30 ++++----- .../parser}/TeardownSectionParserTests.java | 12 ++-- .../parser}/TestSectionParserTests.java | 46 ++++++------- ...ntYamlSuiteRestApiParserFailingTests.java} | 11 +-- .../ClientYamlSuiteRestApiParserTests.java} | 15 +++-- .../support}/FileUtilsTests.java | 4 +- 115 files changed, 870 insertions(+), 823 deletions(-) rename distribution/deb/src/test/java/org/elasticsearch/test/rest/{RestIT.java => DebClientYamlTestSuiteIT.java} (74%) create mode 100644 distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/IntegTestZipClientYamlTestSuiteIT.java rename distribution/{integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java => rpm/src/test/java/org/elasticsearch/test/rest/RpmClientYamlTestSuiteIT.java} (74%) rename distribution/{rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java => tar/src/test/java/org/elasticsearch/test/rest/TarClientYamlTestSuiteIT.java} (74%) delete mode 100644 distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java rename distribution/{tar/src/test/java/org/elasticsearch/test/rest/RestIT.java => zip/src/test/java/org/elasticsearch/test/rest/ZipClientYamlTestSuiteIT.java} (74%) rename docs/src/test/java/org/elasticsearch/smoketest/{SmokeTestDocsIT.java => DocsClientYamlTestSuiteIT.java} (80%) rename modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/{MatrixAggregationRestIT.java => MatrixStatsClientYamlTestSuiteIT.java} (73%) rename modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/{IngestCommonRestIT.java => IngestCommonClientYamlTestSuiteIT.java} (73%) rename modules/lang-expression/src/test/java/org/elasticsearch/script/expression/{ExpressionRestIT.java => LangExpressionClientYamlTestSuiteIT.java} (73%) rename 
modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/{GroovyRestIT.java => LangGroovyClientYamlTestSuiteIT.java} (73%) rename modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/{MustacheRestIT.java => LangMustacheClientYamlTestSuiteIT.java} (73%) rename modules/lang-painless/src/test/java/org/elasticsearch/painless/{PainlessRestIT.java => LangPainlessClientYamlTestSuiteIT.java} (73%) rename modules/percolator/src/test/java/org/elasticsearch/percolator/{PercolatorRestIT.java => PercolatorClientYamlTestSuiteIT.java} (73%) rename modules/reindex/src/test/java/org/elasticsearch/index/reindex/{ReindexRestIT.java => ReindexClientYamlTestSuiteIT.java} (74%) rename modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/{Netty3RestIT.java => Netty3ClientYamlTestSuiteIT.java} (74%) rename modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/{Netty4RestIT.java => Netty4ClientYamlTestSuiteIT.java} (77%) rename plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/{AnalysisICURestIT.java => IcuClientYamlTestSuiteIT.java} (74%) rename plugins/{analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/AnalysisSmartChineseRestIT.java => analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiClientYamlTestSuiteIT.java} (73%) create mode 100644 plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/PhoneticClientYamlTestSuiteIT.java rename plugins/{analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/AnalysisKuromojiRestIT.java => analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SmartCNClientYamlTestSuiteIT.java} (73%) delete mode 100644 plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/AnalysisPolishRestIT.java rename plugins/{analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticRestIT.java => analysis-stempel/src/test/java/org/elasticsearch/index/analysis/StempelClientYamlTestSuiteIT.java} 
(73%) rename plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/{AzureDiscoveryRestIT.java => DiscoveryAzureClassicClientYamlTestSuiteIT.java} (72%) rename plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/{DiscoveryEc2RestIT.java => CloudAwsClientYamlTestSuiteIT.java} (73%) rename plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/{DiscoveryGCERestIT.java => DiscoveryGceClientYamlTestSuiteIT.java} (73%) rename plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/{IngestAttachmentRestIT.java => IngestAttachmentClientYamlTestSuiteIT.java} (73%) rename plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/{IngestGeoIpRestIT.java => IngestGeoIpClientYamlTestSuiteIT.java} (73%) rename plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/{UserAgentRestIT.java => IngestUserAgentClientYamlTestSuiteIT.java} (73%) rename plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/{JvmExampleRestIT.java => JvmExampleClientYamlTestSuiteIT.java} (73%) rename plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/{LangJavaScriptRestIT.java => LangJavascriptClientYamlTestSuiteIT.java} (73%) rename plugins/lang-python/src/test/java/org/elasticsearch/script/python/{LangPythonScriptRestIT.java => LangPythonClientYamlTestSuiteIT.java} (73%) rename plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/{MapperAttachmentsRestIT.java => MapperAttachmentsClientYamlTestSuiteIT.java} (72%) rename plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/{MapperMurmur3RestIT.java => MapperMurmur3ClientYamlTestSuiteIT.java} (73%) rename plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/{MapperSizeRestIT.java => MapperSizeClientYamlTestSuiteIT.java} (73%) rename plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/{AzureRepositoryRestIT.java => 
RepositoryAzureClientYamlTestSuiteIT.java} (73%) rename plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/{GoogleCloudStorageRepositoryRestIT.java => RepositoryGcsClientYamlTestSuiteIT.java} (74%) rename plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/{HdfsRepositoryRestIT.java => RepositoryHdfsClientYamlTestSuiteIT.java} (73%) rename plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/{RepositoryS3RestIT.java => RepositoryS3ClientYamlTestSuiteIT.java} (73%) rename plugins/store-smb/src/test/java/org/elasticsearch/index/store/{SMBStoreRestIT.java => StoreSmbClientYamlTestSuiteIT.java} (73%) rename qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/{MultiNodeBackwardsIT.java => Backwards50ClientYamlTestSuiteIT.java} (76%) rename qa/{smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiIT.java => smoke-test-ingest-disabled/src/test/java/org/elasticsearch/smoketest/SmokeTestIngestDisabledClientYamlTestSuiteIT.java} (72%) rename qa/{smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsIT.java => smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/smoketest/SmokeTestIngestWithAllDepsClientYamlTestSuiteIT.java} (72%) rename qa/{smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/smoketest/IngestWithDependenciesIT.java => smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java} (72%) rename qa/{smoke-test-ingest-disabled/src/test/java/org/elasticsearch/smoketest/IngestDisabledIT.java => smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java} (73%) create mode 100644 qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessClientYamlTestSuiteIT.java delete mode 100644 
qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessIT.java rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/BlacklistedPathPatternMatcher.java (98%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{RestTestCandidate.java => yaml/ClientYamlTestCandidate.java} (67%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{RestTestExecutionContext.java => yaml/ClientYamlTestExecutionContext.java} (79%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/ESClientYamlSuiteTestCase.java (83%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/ObjectPath.java (99%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/Stash.java (99%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{client/RestTestClient.java => yaml/client/ClientYamlTestClient.java} (84%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{client/RestTestResponse.java => yaml/client/ClientYamlTestResponse.java} (94%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{client/RestTestResponseException.java => yaml/client/ClientYamlTestResponseException.java} (80%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{parser/RestTestFragmentParser.java => yaml/parser/ClientYamlTestFragmentParser.java} (76%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{parser/RestTestParseException.java => yaml/parser/ClientYamlTestParseException.java} (80%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{parser/RestTestSectionParser.java => yaml/parser/ClientYamlTestSectionParser.java} (74%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{parser/RestTestSuiteParseContext.java => yaml/parser/ClientYamlTestSuiteParseContext.java} (74%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{parser/RestTestSuiteParser.java => 
yaml/parser/ClientYamlTestSuiteParser.java} (72%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/parser/DoSectionParser.java (89%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/parser/GreaterThanEqualToParser.java (70%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/parser/GreaterThanParser.java (72%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/parser/IsFalseParser.java (74%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/parser/IsTrueParser.java (74%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/parser/LengthParser.java (77%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/parser/LessThanOrEqualToParser.java (71%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/parser/LessThanParser.java (72%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/parser/MatchParser.java (77%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/parser/SetSectionParser.java (79%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/parser/SetupSectionParser.java (76%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/parser/SkipSectionParser.java (77%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/parser/TeardownSectionParser.java (76%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{spec/RestApi.java => yaml/restspec/ClientYamlSuiteRestApi.java} (76%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{spec/RestApiParser.java => yaml/restspec/ClientYamlSuiteRestApiParser.java} (94%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{client/RestPath.java => yaml/restspec/ClientYamlSuiteRestPath.java} (90%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{spec/RestSpec.java => 
yaml/restspec/ClientYamlSuiteRestSpec.java} (72%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/section/ApiCallSection.java (97%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/section/Assertion.java (86%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{section/TestSection.java => yaml/section/ClientYamlTestSection.java} (87%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{section/RestTestSuite.java => yaml/section/ClientYamlTestSuite.java} (81%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/section/DoSection.java (88%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/section/ExecutableSection.java (83%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/section/GreaterThanAssertion.java (97%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/section/GreaterThanEqualToAssertion.java (97%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/section/IsFalseAssertion.java (97%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/section/IsTrueAssertion.java (97%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/section/LengthAssertion.java (98%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/section/LessThanAssertion.java (97%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/section/LessThanOrEqualToAssertion.java (97%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/section/MatchAssertion.java (99%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/section/SetSection.java (87%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/section/SetupSection.java (97%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/section/SkipSection.java (97%) rename 
test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/section/TeardownSection.java (96%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/support/Features.java (97%) rename test/framework/src/main/java/org/elasticsearch/test/rest/{ => yaml}/support/FileUtils.java (99%) rename test/framework/src/test/java/org/elasticsearch/test/rest/{ => yaml}/BlacklistedPathPatternMatcherTests.java (97%) rename test/framework/src/test/java/org/elasticsearch/test/rest/{test => yaml}/ObjectPathTests.java (98%) rename test/framework/src/test/java/org/elasticsearch/test/rest/{test => yaml}/StashTests.java (94%) rename test/framework/src/test/java/org/elasticsearch/test/rest/{test => yaml/parser}/AbstractParserTestCase.java (96%) rename test/framework/src/test/java/org/elasticsearch/test/rest/{test => yaml/parser}/AssertionParsersTests.java (78%) rename test/framework/src/test/java/org/elasticsearch/test/rest/{test/RestTestParserTests.java => yaml/parser/ClientYamlSuiteTestParserTests.java} (94%) rename test/framework/src/test/java/org/elasticsearch/test/rest/{test => yaml/parser}/DoSectionParserTests.java (89%) rename test/framework/src/test/java/org/elasticsearch/test/rest/{test => yaml/parser}/SetSectionParserTests.java (78%) rename test/framework/src/test/java/org/elasticsearch/test/rest/{test => yaml/parser}/SetupSectionParserTests.java (92%) rename test/framework/src/test/java/org/elasticsearch/test/rest/{test => yaml/parser}/SkipSectionParserTests.java (80%) rename test/framework/src/test/java/org/elasticsearch/test/rest/{test => yaml/parser}/TeardownSectionParserTests.java (90%) rename test/framework/src/test/java/org/elasticsearch/test/rest/{test => yaml/parser}/TestSectionParserTests.java (85%) rename test/framework/src/test/java/org/elasticsearch/test/rest/{test/RestApiParserFailingTests.java => yaml/restspec/ClientYamlSuiteRestApiParserFailingTests.java} (89%) rename 
test/framework/src/test/java/org/elasticsearch/test/rest/{test/RestApiParserTests.java => yaml/restspec/ClientYamlSuiteRestApiParserTests.java} (93%) rename test/framework/src/test/java/org/elasticsearch/test/rest/{test => yaml/support}/FileUtilsTests.java (98%) diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 0cebf280acb..a7dbb145e40 100644 --- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -121,7 +121,7 @@ public class ExceptionSerializationTests extends ESTestCase { final Path startPath = PathUtils.get(ElasticsearchException.class.getProtectionDomain().getCodeSource().getLocation().toURI()) .resolve("org").resolve("elasticsearch"); final Set> ignore = Sets.newHashSet( - org.elasticsearch.test.rest.parser.RestTestParseException.class, + org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException.class, CancellableThreadsTests.CustomException.class, org.elasticsearch.rest.BytesRestResponseTests.WithHeadersException.class, AbstractClientHeadersTestCase.InternalException.class); diff --git a/distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/deb/src/test/java/org/elasticsearch/test/rest/DebClientYamlTestSuiteIT.java similarity index 74% rename from distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java rename to distribution/deb/src/test/java/org/elasticsearch/test/rest/DebClientYamlTestSuiteIT.java index 94629641849..a94f0faf2d2 100644 --- a/distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ b/distribution/deb/src/test/java/org/elasticsearch/test/rest/DebClientYamlTestSuiteIT.java @@ -20,18 +20,21 @@ package org.elasticsearch.test.rest; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import 
org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; /** Rest integration test. Runs against a cluster started by {@code gradle integTest} */ -public class RestIT extends ESClientYamlSuiteTestCase { - public RestIT(RestTestCandidate testCandidate) { +public class DebClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + public DebClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return createParameters(0, 1); } } diff --git a/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/IntegTestZipClientYamlTestSuiteIT.java b/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/IntegTestZipClientYamlTestSuiteIT.java new file mode 100644 index 00000000000..ca54a3becd9 --- /dev/null +++ b/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/IntegTestZipClientYamlTestSuiteIT.java @@ -0,0 +1,40 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.test.rest; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; + +import java.io.IOException; + +/** Rest integration test. Runs against a cluster started by {@code gradle integTest} */ +public class IntegTestZipClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + public IntegTestZipClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws IOException, ClientYamlTestParseException { + return createParameters(0, 1); + } +} diff --git a/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RpmClientYamlTestSuiteIT.java similarity index 74% rename from distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java rename to distribution/rpm/src/test/java/org/elasticsearch/test/rest/RpmClientYamlTestSuiteIT.java index 94629641849..a5b7f46269f 100644 --- a/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ b/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RpmClientYamlTestSuiteIT.java @@ -20,18 +20,21 @@ package org.elasticsearch.test.rest; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import 
org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; /** Rest integration test. Runs against a cluster started by {@code gradle integTest} */ -public class RestIT extends ESClientYamlSuiteTestCase { - public RestIT(RestTestCandidate testCandidate) { +public class RpmClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + public RpmClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return createParameters(0, 1); } } diff --git a/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/tar/src/test/java/org/elasticsearch/test/rest/TarClientYamlTestSuiteIT.java similarity index 74% rename from distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java rename to distribution/tar/src/test/java/org/elasticsearch/test/rest/TarClientYamlTestSuiteIT.java index 94629641849..73d323f7d50 100644 --- a/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ b/distribution/tar/src/test/java/org/elasticsearch/test/rest/TarClientYamlTestSuiteIT.java @@ -20,18 +20,21 @@ package org.elasticsearch.test.rest; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; /** Rest integration test. 
Runs against a cluster started by {@code gradle integTest} */ -public class RestIT extends ESClientYamlSuiteTestCase { - public RestIT(RestTestCandidate testCandidate) { +public class TarClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + public TarClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return createParameters(0, 1); } } diff --git a/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java deleted file mode 100644 index 94629641849..00000000000 --- a/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.test.rest; - -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.parser.RestTestParseException; - -import java.io.IOException; - -/** Rest integration test. 
Runs against a cluster started by {@code gradle integTest} */ -public class RestIT extends ESClientYamlSuiteTestCase { - public RestIT(RestTestCandidate testCandidate) { - super(testCandidate); - } - - @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { - return createParameters(0, 1); - } -} diff --git a/distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/zip/src/test/java/org/elasticsearch/test/rest/ZipClientYamlTestSuiteIT.java similarity index 74% rename from distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java rename to distribution/zip/src/test/java/org/elasticsearch/test/rest/ZipClientYamlTestSuiteIT.java index 94629641849..329c8259d0b 100644 --- a/distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ b/distribution/zip/src/test/java/org/elasticsearch/test/rest/ZipClientYamlTestSuiteIT.java @@ -20,18 +20,21 @@ package org.elasticsearch.test.rest; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; /** Rest integration test. 
Runs against a cluster started by {@code gradle integTest} */ -public class RestIT extends ESClientYamlSuiteTestCase { - public RestIT(RestTestCandidate testCandidate) { +public class ZipClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + public ZipClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return createParameters(0, 1); } } diff --git a/docs/src/test/java/org/elasticsearch/smoketest/SmokeTestDocsIT.java b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java similarity index 80% rename from docs/src/test/java/org/elasticsearch/smoketest/SmokeTestDocsIT.java rename to docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java index d8eccda61a2..688eefc98d5 100644 --- a/docs/src/test/java/org/elasticsearch/smoketest/SmokeTestDocsIT.java +++ b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java @@ -22,21 +22,21 @@ package org.elasticsearch.smoketest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; import java.util.List; -public class SmokeTestDocsIT extends ESClientYamlSuiteTestCase { +public class DocsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public SmokeTestDocsIT(@Name("yaml") RestTestCandidate testCandidate) { + public 
DocsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } diff --git a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/MatrixAggregationRestIT.java b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/MatrixStatsClientYamlTestSuiteIT.java similarity index 73% rename from modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/MatrixAggregationRestIT.java rename to modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/MatrixStatsClientYamlTestSuiteIT.java index 47609494a61..6fb70ad3d63 100644 --- a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/MatrixAggregationRestIT.java +++ b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/MatrixStatsClientYamlTestSuiteIT.java @@ -20,22 +20,20 @@ package org.elasticsearch.search.aggregations.matrix; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -/** - * - */ -public class MatrixAggregationRestIT extends ESClientYamlSuiteTestCase { - public MatrixAggregationRestIT(@Name("yaml")RestTestCandidate testCandidate) { +public class 
MatrixStatsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + public MatrixStatsClientYamlTestSuiteIT(@Name("yaml")ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestCommonRestIT.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestCommonClientYamlTestSuiteIT.java similarity index 73% rename from modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestCommonRestIT.java rename to modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestCommonClientYamlTestSuiteIT.java index 65ee917f30e..3c71f5710fd 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestCommonRestIT.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestCommonClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.ingest.common; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class IngestCommonRestIT extends ESClientYamlSuiteTestCase { +public class IngestCommonClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public IngestCommonRestIT(@Name("yaml") RestTestCandidate testCandidate) { + 
public IngestCommonClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/LangExpressionClientYamlTestSuiteIT.java similarity index 73% rename from modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java rename to modules/lang-expression/src/test/java/org/elasticsearch/script/expression/LangExpressionClientYamlTestSuiteIT.java index 8ff5168a7cf..3d1071ee17c 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/LangExpressionClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.script.expression; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class ExpressionRestIT extends ESClientYamlSuiteTestCase { +public class LangExpressionClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public ExpressionRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public 
LangExpressionClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/LangGroovyClientYamlTestSuiteIT.java similarity index 73% rename from modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/LangGroovyClientYamlTestSuiteIT.java index b681e5f6857..c8e9c74827a 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/LangGroovyClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.script.groovy; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class GroovyRestIT extends ESClientYamlSuiteTestCase { +public class LangGroovyClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public GroovyRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public LangGroovyClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } 
@ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/LangMustacheClientYamlTestSuiteIT.java similarity index 73% rename from modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java rename to modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/LangMustacheClientYamlTestSuiteIT.java index 95f86dec3c7..160327dbab6 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/LangMustacheClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.script.mustache; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class MustacheRestIT extends ESClientYamlSuiteTestCase { +public class LangMustacheClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public MustacheRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public LangMustacheClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws 
IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessRestIT.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LangPainlessClientYamlTestSuiteIT.java similarity index 73% rename from modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessRestIT.java rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/LangPainlessClientYamlTestSuiteIT.java index 353aa13f839..ca95dafd0b8 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessRestIT.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LangPainlessClientYamlTestSuiteIT.java @@ -21,21 +21,22 @@ package org.elasticsearch.painless; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; /** Runs yaml rest tests */ -public class PainlessRestIT extends ESClientYamlSuiteTestCase { +public class LangPainlessClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public PainlessRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public LangPainlessClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, 
ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorRestIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorClientYamlTestSuiteIT.java similarity index 73% rename from modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorRestIT.java rename to modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorClientYamlTestSuiteIT.java index f4ec1b5215e..24e92715ede 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorRestIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorClientYamlTestSuiteIT.java @@ -21,19 +21,20 @@ package org.elasticsearch.percolator; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class PercolatorRestIT extends ESClientYamlSuiteTestCase { - public PercolatorRestIT(@Name("yaml") RestTestCandidate testCandidate) { +public class PercolatorClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + public PercolatorClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git 
a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRestIT.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexClientYamlTestSuiteIT.java similarity index 74% rename from modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRestIT.java rename to modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexClientYamlTestSuiteIT.java index c67908f631d..186bb2f0a5e 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRestIT.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexClientYamlTestSuiteIT.java @@ -22,19 +22,19 @@ package org.elasticsearch.index.reindex; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class ReindexRestIT extends ESClientYamlSuiteTestCase { - public ReindexRestIT(@Name("yaml") RestTestCandidate testCandidate) { +public class ReindexClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + public ReindexClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3RestIT.java 
b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3ClientYamlTestSuiteIT.java similarity index 74% rename from modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3RestIT.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3ClientYamlTestSuiteIT.java index 9aa57bc312f..2fce8e3022d 100644 --- a/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3RestIT.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3ClientYamlTestSuiteIT.java @@ -22,20 +22,20 @@ package org.elasticsearch.http.netty3; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class Netty3RestIT extends ESClientYamlSuiteTestCase { +public class Netty3ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public Netty3RestIT(@Name("yaml") RestTestCandidate testCandidate) { + public Netty3ClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4RestIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4ClientYamlTestSuiteIT.java similarity index 77% 
rename from modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4RestIT.java rename to modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4ClientYamlTestSuiteIT.java index 71cbf70abb3..8f7483e2791 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4RestIT.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4ClientYamlTestSuiteIT.java @@ -21,25 +21,25 @@ package org.elasticsearch.http.netty4; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; + import org.apache.lucene.util.TimeUnits; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; //TODO: This is a *temporary* workaround to ensure a timeout does not mask other problems @TimeoutSuite(millis = 30 * TimeUnits.MINUTE) -public class Netty4RestIT extends ESClientYamlSuiteTestCase { +public class Netty4ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public Netty4RestIT(@Name("yaml") RestTestCandidate testCandidate) { + public Netty4ClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisICURestIT.java 
b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IcuClientYamlTestSuiteIT.java similarity index 74% rename from plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisICURestIT.java rename to plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IcuClientYamlTestSuiteIT.java index 17e5e1c3ca7..47224836037 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisICURestIT.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IcuClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class AnalysisICURestIT extends ESClientYamlSuiteTestCase { +public class IcuClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public AnalysisICURestIT(@Name("yaml") RestTestCandidate testCandidate) { + public IcuClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/AnalysisSmartChineseRestIT.java 
b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiClientYamlTestSuiteIT.java similarity index 73% rename from plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/AnalysisSmartChineseRestIT.java rename to plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiClientYamlTestSuiteIT.java index 58c86acea84..e99c5c2bacf 100644 --- a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/AnalysisSmartChineseRestIT.java +++ b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class AnalysisSmartChineseRestIT extends ESClientYamlSuiteTestCase { +public class KuromojiClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public AnalysisSmartChineseRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public KuromojiClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/PhoneticClientYamlTestSuiteIT.java 
b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/PhoneticClientYamlTestSuiteIT.java new file mode 100644 index 00000000000..975b84f1574 --- /dev/null +++ b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/PhoneticClientYamlTestSuiteIT.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.analysis; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; + +import java.io.IOException; + +public class PhoneticClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + + public PhoneticClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws IOException, ClientYamlTestParseException { + return ESClientYamlSuiteTestCase.createParameters(0, 1); + } +} + diff --git a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/AnalysisKuromojiRestIT.java b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SmartCNClientYamlTestSuiteIT.java similarity index 73% rename from plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/AnalysisKuromojiRestIT.java rename to plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SmartCNClientYamlTestSuiteIT.java index 5697fb55bde..6415dc436eb 100644 --- a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/AnalysisKuromojiRestIT.java +++ b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SmartCNClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; 
+import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class AnalysisKuromojiRestIT extends ESClientYamlSuiteTestCase { +public class SmartCNClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public AnalysisKuromojiRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public SmartCNClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/AnalysisPolishRestIT.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/AnalysisPolishRestIT.java deleted file mode 100644 index 54da5de8e4a..00000000000 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/AnalysisPolishRestIT.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.analysis; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; - -import java.io.IOException; - -public class AnalysisPolishRestIT extends ESClientYamlSuiteTestCase { - - public AnalysisPolishRestIT(@Name("yaml") RestTestCandidate testCandidate) { - super(testCandidate); - } - - @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { - return ESClientYamlSuiteTestCase.createParameters(0, 1); - } -} - diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticRestIT.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/StempelClientYamlTestSuiteIT.java similarity index 73% rename from plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticRestIT.java rename to plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/StempelClientYamlTestSuiteIT.java index 48f1c4b1716..34d264122ef 100644 --- a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticRestIT.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/StempelClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import 
org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class AnalysisPhoneticRestIT extends ESClientYamlSuiteTestCase { +public class StempelClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public AnalysisPhoneticRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public StempelClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryRestIT.java b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/DiscoveryAzureClassicClientYamlTestSuiteIT.java similarity index 72% rename from plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryRestIT.java rename to plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/DiscoveryAzureClassicClientYamlTestSuiteIT.java index 5506169f79d..30276c16c89 100644 --- a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryRestIT.java +++ b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/DiscoveryAzureClassicClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.discovery.azure.classic; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import 
org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class AzureDiscoveryRestIT extends ESClientYamlSuiteTestCase { +public class DiscoveryAzureClassicClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public AzureDiscoveryRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public DiscoveryAzureClassicClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/DiscoveryEc2RestIT.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/CloudAwsClientYamlTestSuiteIT.java similarity index 73% rename from plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/DiscoveryEc2RestIT.java rename to plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/CloudAwsClientYamlTestSuiteIT.java index cc621dd5949..f5f49c14833 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/DiscoveryEc2RestIT.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/CloudAwsClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.cloud.aws; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import 
org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class DiscoveryEc2RestIT extends ESClientYamlSuiteTestCase { +public class CloudAwsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public DiscoveryEc2RestIT(@Name("yaml") RestTestCandidate testCandidate) { + public CloudAwsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGCERestIT.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGceClientYamlTestSuiteIT.java similarity index 73% rename from plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGCERestIT.java rename to plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGceClientYamlTestSuiteIT.java index dae943f7bd2..8ce17ff9fa5 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGCERestIT.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGceClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.discovery.gce; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import 
org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class DiscoveryGCERestIT extends ESClientYamlSuiteTestCase { +public class DiscoveryGceClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public DiscoveryGCERestIT(@Name("yaml") RestTestCandidate testCandidate) { + public DiscoveryGceClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/IngestAttachmentRestIT.java b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/IngestAttachmentClientYamlTestSuiteIT.java similarity index 73% rename from plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/IngestAttachmentRestIT.java rename to plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/IngestAttachmentClientYamlTestSuiteIT.java index 671b5b0ab95..d720a4abf28 100644 --- a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/IngestAttachmentRestIT.java +++ b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/IngestAttachmentClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.ingest.attachment; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; 
+import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class IngestAttachmentRestIT extends ESClientYamlSuiteTestCase { +public class IngestAttachmentClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public IngestAttachmentRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public IngestAttachmentClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpRestIT.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpClientYamlTestSuiteIT.java similarity index 73% rename from plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpRestIT.java rename to plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpClientYamlTestSuiteIT.java index 19cd7226d43..26838b600da 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpRestIT.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpClientYamlTestSuiteIT.java @@ -21,22 +21,21 @@ package org.elasticsearch.ingest.geoip; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import 
org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -import java.util.Collection; -public class IngestGeoIpRestIT extends ESClientYamlSuiteTestCase { +public class IngestGeoIpClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public IngestGeoIpRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public IngestGeoIpClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentRestIT.java b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/IngestUserAgentClientYamlTestSuiteIT.java similarity index 73% rename from plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentRestIT.java rename to plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/IngestUserAgentClientYamlTestSuiteIT.java index 0ca96698e3b..b0aa115a1a2 100644 --- a/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentRestIT.java +++ b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/IngestUserAgentClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.ingest.useragent; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; 
+import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class UserAgentRestIT extends ESClientYamlSuiteTestCase { +public class IngestUserAgentClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public UserAgentRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public IngestUserAgentClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleRestIT.java b/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleClientYamlTestSuiteIT.java similarity index 73% rename from plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleRestIT.java rename to plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleClientYamlTestSuiteIT.java index 62be8332439..0ef413d9595 100644 --- a/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleRestIT.java +++ b/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.plugin.example; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import 
org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class JvmExampleRestIT extends ESClientYamlSuiteTestCase { +public class JvmExampleClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public JvmExampleRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public JvmExampleClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/LangJavaScriptRestIT.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/LangJavascriptClientYamlTestSuiteIT.java similarity index 73% rename from plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/LangJavaScriptRestIT.java rename to plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/LangJavascriptClientYamlTestSuiteIT.java index 014c7073e71..e89372c8b36 100644 --- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/LangJavaScriptRestIT.java +++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/LangJavascriptClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.script.javascript; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import 
org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class LangJavaScriptRestIT extends ESClientYamlSuiteTestCase { +public class LangJavascriptClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public LangJavaScriptRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public LangJavascriptClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/LangPythonScriptRestIT.java b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/LangPythonClientYamlTestSuiteIT.java similarity index 73% rename from plugins/lang-python/src/test/java/org/elasticsearch/script/python/LangPythonScriptRestIT.java rename to plugins/lang-python/src/test/java/org/elasticsearch/script/python/LangPythonClientYamlTestSuiteIT.java index cbf7547a580..618ea6b20e5 100644 --- a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/LangPythonScriptRestIT.java +++ b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/LangPythonClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.script.python; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import 
org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class LangPythonScriptRestIT extends ESClientYamlSuiteTestCase { +public class LangPythonClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public LangPythonScriptRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public LangPythonClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsClientYamlTestSuiteIT.java similarity index 72% rename from plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java rename to plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsClientYamlTestSuiteIT.java index 0958225ae18..53a5c5c2813 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.mapper.attachments; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import 
org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class MapperAttachmentsRestIT extends ESClientYamlSuiteTestCase { +public class MapperAttachmentsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public MapperAttachmentsRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public MapperAttachmentsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return createParameters(0, 1); } } diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3ClientYamlTestSuiteIT.java similarity index 73% rename from plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java rename to plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3ClientYamlTestSuiteIT.java index 37a4490d38c..204f6c07a99 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3ClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.index.mapper.murmur3; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import 
org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class MapperMurmur3RestIT extends ESClientYamlSuiteTestCase { +public class MapperMurmur3ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public MapperMurmur3RestIT(@Name("yaml") RestTestCandidate testCandidate) { + public MapperMurmur3ClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return createParameters(0, 1); } } diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeClientYamlTestSuiteIT.java similarity index 73% rename from plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java rename to plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeClientYamlTestSuiteIT.java index f5ee35c5638..44f26c4ec51 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.index.mapper.size; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import 
org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class MapperSizeRestIT extends ESClientYamlSuiteTestCase { +public class MapperSizeClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public MapperSizeRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public MapperSizeClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return createParameters(0, 1); } } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryRestIT.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java similarity index 73% rename from plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryRestIT.java rename to plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java index 3c565d407ce..c7eeff27401 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryRestIT.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.repositories.azure; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import 
org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class AzureRepositoryRestIT extends ESClientYamlSuiteTestCase { +public class RepositoryAzureClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public AzureRepositoryRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public RepositoryAzureClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepositoryRestIT.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/RepositoryGcsClientYamlTestSuiteIT.java similarity index 74% rename from plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepositoryRestIT.java rename to plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/RepositoryGcsClientYamlTestSuiteIT.java index b19b8623ace..52145bf87e2 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepositoryRestIT.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/RepositoryGcsClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.repositories.gcs; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import 
org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class GoogleCloudStorageRepositoryRestIT extends ESClientYamlSuiteTestCase { +public class RepositoryGcsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public GoogleCloudStorageRepositoryRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public RepositoryGcsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return createParameters(0, 1); } } diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/RepositoryHdfsClientYamlTestSuiteIT.java similarity index 73% rename from plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java rename to plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/RepositoryHdfsClientYamlTestSuiteIT.java index 147838df9ea..1dfbb3c51b7 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/RepositoryHdfsClientYamlTestSuiteIT.java @@ -22,18 +22,19 @@ import java.io.IOException; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; -public class HdfsRepositoryRestIT extends ESClientYamlSuiteTestCase { +import 
org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; - public HdfsRepositoryRestIT(@Name("yaml") RestTestCandidate testCandidate) { +public class RepositoryHdfsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + + public RepositoryHdfsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java similarity index 73% rename from plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java rename to plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java index 5dada2a2637..04c4f6fc0f1 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.repositories.s3; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import 
org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class RepositoryS3RestIT extends ESClientYamlSuiteTestCase { +public class RepositoryS3ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public RepositoryS3RestIT(@Name("yaml") RestTestCandidate testCandidate) { + public RepositoryS3ClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SMBStoreRestIT.java b/plugins/store-smb/src/test/java/org/elasticsearch/index/store/StoreSmbClientYamlTestSuiteIT.java similarity index 73% rename from plugins/store-smb/src/test/java/org/elasticsearch/index/store/SMBStoreRestIT.java rename to plugins/store-smb/src/test/java/org/elasticsearch/index/store/StoreSmbClientYamlTestSuiteIT.java index 99617b94644..72394b79693 100644 --- a/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SMBStoreRestIT.java +++ b/plugins/store-smb/src/test/java/org/elasticsearch/index/store/StoreSmbClientYamlTestSuiteIT.java @@ -21,20 +21,21 @@ package org.elasticsearch.index.store; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class SMBStoreRestIT 
extends ESClientYamlSuiteTestCase { +public class StoreSmbClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public SMBStoreRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public StoreSmbClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } } diff --git a/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/MultiNodeBackwardsIT.java b/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/Backwards50ClientYamlTestSuiteIT.java similarity index 76% rename from qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/MultiNodeBackwardsIT.java rename to qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/Backwards50ClientYamlTestSuiteIT.java index e23ca840025..e3ab68b3477 100644 --- a/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/MultiNodeBackwardsIT.java +++ b/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/Backwards50ClientYamlTestSuiteIT.java @@ -22,21 +22,21 @@ package org.elasticsearch.backwards; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; import org.apache.lucene.util.TimeUnits; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; @TimeoutSuite(millis = 40 * TimeUnits.MINUTE) // some of the windows test VMs are slow as hell -public class 
MultiNodeBackwardsIT extends ESClientYamlSuiteTestCase { +public class Backwards50ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public MultiNodeBackwardsIT(RestTestCandidate testCandidate) { + public Backwards50ClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return createParameters(0, 1); } } diff --git a/qa/smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiIT.java b/qa/smoke-test-ingest-disabled/src/test/java/org/elasticsearch/smoketest/SmokeTestIngestDisabledClientYamlTestSuiteIT.java similarity index 72% rename from qa/smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiIT.java rename to qa/smoke-test-ingest-disabled/src/test/java/org/elasticsearch/smoketest/SmokeTestIngestDisabledClientYamlTestSuiteIT.java index 78d7acd8c24..c8d506424c0 100644 --- a/qa/smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiIT.java +++ b/qa/smoke-test-ingest-disabled/src/test/java/org/elasticsearch/smoketest/SmokeTestIngestDisabledClientYamlTestSuiteIT.java @@ -21,21 +21,22 @@ package org.elasticsearch.smoketest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class SmokeTestMultiIT extends ESClientYamlSuiteTestCase { +public class 
SmokeTestIngestDisabledClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public SmokeTestMultiIT(@Name("yaml") RestTestCandidate testCandidate) { + public SmokeTestIngestDisabledClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } -} +} diff --git a/qa/smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsIT.java b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/smoketest/SmokeTestIngestWithAllDepsClientYamlTestSuiteIT.java similarity index 72% rename from qa/smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsIT.java rename to qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/smoketest/SmokeTestIngestWithAllDepsClientYamlTestSuiteIT.java index 61903e62993..b3b84dfc55e 100644 --- a/qa/smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsIT.java +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/smoketest/SmokeTestIngestWithAllDepsClientYamlTestSuiteIT.java @@ -21,21 +21,22 @@ package org.elasticsearch.smoketest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class SmokeTestPluginsIT extends 
ESClientYamlSuiteTestCase { +public class SmokeTestIngestWithAllDepsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public SmokeTestPluginsIT(@Name("yaml") RestTestCandidate testCandidate) { + public SmokeTestIngestWithAllDepsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } -} +} diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/smoketest/IngestWithDependenciesIT.java b/qa/smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java similarity index 72% rename from qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/smoketest/IngestWithDependenciesIT.java rename to qa/smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java index 1ed23e7add5..456387e6c19 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/smoketest/IngestWithDependenciesIT.java +++ b/qa/smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java @@ -21,21 +21,22 @@ package org.elasticsearch.smoketest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; 
-public class IngestWithDependenciesIT extends ESClientYamlSuiteTestCase { +public class SmokeTestMultiNodeClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public IngestWithDependenciesIT(@Name("yaml") RestTestCandidate testCandidate) { + public SmokeTestMultiNodeClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } - } + diff --git a/qa/smoke-test-ingest-disabled/src/test/java/org/elasticsearch/smoketest/IngestDisabledIT.java b/qa/smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java similarity index 73% rename from qa/smoke-test-ingest-disabled/src/test/java/org/elasticsearch/smoketest/IngestDisabledIT.java rename to qa/smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java index d1cb62cc54b..05021f3c2bb 100644 --- a/qa/smoke-test-ingest-disabled/src/test/java/org/elasticsearch/smoketest/IngestDisabledIT.java +++ b/qa/smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java @@ -21,21 +21,22 @@ package org.elasticsearch.smoketest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; -public class IngestDisabledIT extends 
ESClientYamlSuiteTestCase { +public class SmokeTestPluginsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public IngestDisabledIT(@Name("yaml") RestTestCandidate testCandidate) { + public SmokeTestPluginsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } - } + diff --git a/qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessClientYamlTestSuiteIT.java b/qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessClientYamlTestSuiteIT.java new file mode 100644 index 00000000000..db01ce2dfe9 --- /dev/null +++ b/qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessClientYamlTestSuiteIT.java @@ -0,0 +1,40 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.smoketest; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; + +import java.io.IOException; + +public class SmokeTestReindexWithPainlessClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + public SmokeTestReindexWithPainlessClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws IOException, ClientYamlTestParseException { + return ESClientYamlSuiteTestCase.createParameters(0, 1); + } +} diff --git a/qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessIT.java b/qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessIT.java deleted file mode 100644 index b7061432325..00000000000 --- a/qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessIT.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.smoketest; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; - -import java.io.IOException; - -public class SmokeTestReindexWithPainlessIT extends ESClientYamlSuiteTestCase { - public SmokeTestReindexWithPainlessIT(@Name("yaml") RestTestCandidate testCandidate) { - super(testCandidate); - } - - @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { - return ESClientYamlSuiteTestCase.createParameters(0, 1); - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java index d5c14935ecd..92433649553 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java +++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java @@ -24,7 +24,7 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; import org.junit.internal.AssumptionViolatedException; import org.junit.runner.Description; import org.junit.runner.notification.Failure; @@ -37,9 +37,9 @@ import static com.carrotsearch.randomizedtesting.SysGlobals.SYSPROP_ITERATIONS; import static com.carrotsearch.randomizedtesting.SysGlobals.SYSPROP_PREFIX; import static 
com.carrotsearch.randomizedtesting.SysGlobals.SYSPROP_TESTMETHOD; import static org.elasticsearch.test.ESIntegTestCase.TESTS_CLUSTER; -import static org.elasticsearch.test.rest.ESClientYamlSuiteTestCase.REST_TESTS_BLACKLIST; -import static org.elasticsearch.test.rest.ESClientYamlSuiteTestCase.REST_TESTS_SPEC; -import static org.elasticsearch.test.rest.ESClientYamlSuiteTestCase.REST_TESTS_SUITE; +import static org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase.REST_TESTS_BLACKLIST; +import static org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase.REST_TESTS_SPEC; +import static org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase.REST_TESTS_SUITE; /** * A {@link RunListener} that emits a command you can use to re-run a failing test with the failing random seed to diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcher.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/BlacklistedPathPatternMatcher.java similarity index 98% rename from test/framework/src/main/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcher.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/BlacklistedPathPatternMatcher.java index e5bb75955cf..f0e0802a6b2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcher.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/BlacklistedPathPatternMatcher.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest; +package org.elasticsearch.test.rest.yaml; import java.util.regex.Pattern; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestCandidate.java similarity index 67% rename from test/framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestCandidate.java index 57c7e1b1305..dd650a38ebb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestCandidate.java @@ -16,23 +16,22 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest; +package org.elasticsearch.test.rest.yaml; -import org.elasticsearch.test.rest.section.RestTestSuite; -import org.elasticsearch.test.rest.section.SetupSection; -import org.elasticsearch.test.rest.section.TeardownSection; -import org.elasticsearch.test.rest.section.TestSection; +import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSuite; +import org.elasticsearch.test.rest.yaml.section.SetupSection; +import org.elasticsearch.test.rest.yaml.section.TeardownSection; +import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection; /** - * Wraps {@link org.elasticsearch.test.rest.section.TestSection}s ready to be run. - * Each test section is associated to its {@link org.elasticsearch.test.rest.section.RestTestSuite}. + * Wraps {@link ClientYamlTestSection}s ready to be run. Each test section is associated to its {@link ClientYamlTestSuite}. 
*/ -public class RestTestCandidate { +public class ClientYamlTestCandidate { - private final RestTestSuite restTestSuite; - private final TestSection testSection; + private final ClientYamlTestSuite restTestSuite; + private final ClientYamlTestSection testSection; - public RestTestCandidate(RestTestSuite restTestSuite, TestSection testSection) { + public ClientYamlTestCandidate(ClientYamlTestSuite restTestSuite, ClientYamlTestSection testSection) { this.restTestSuite = restTestSuite; this.testSection = testSection; } @@ -61,7 +60,7 @@ public class RestTestCandidate { return restTestSuite.getTeardownSection(); } - public TestSection getTestSection() { + public ClientYamlTestSection getTestSection() { return testSection; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java similarity index 79% rename from test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java index 59d45bc0a88..f99ee4be83f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest; +package org.elasticsearch.test.rest.yaml; import org.apache.http.HttpHost; import org.elasticsearch.Version; @@ -24,10 +24,10 @@ import org.elasticsearch.client.RestClient; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.test.rest.client.RestTestClient; -import org.elasticsearch.test.rest.client.RestTestResponse; -import org.elasticsearch.test.rest.client.RestTestResponseException; -import org.elasticsearch.test.rest.spec.RestSpec; +import org.elasticsearch.test.rest.yaml.client.ClientYamlTestClient; +import org.elasticsearch.test.rest.yaml.client.ClientYamlTestResponse; +import org.elasticsearch.test.rest.yaml.client.ClientYamlTestResponseException; +import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; import java.io.IOException; import java.util.HashMap; @@ -40,19 +40,19 @@ import java.util.Map; * Caches the last obtained test response and allows to stash part of it within variables * that can be used as input values in following requests. 
*/ -public class RestTestExecutionContext { +public class ClientYamlTestExecutionContext { - private static final ESLogger logger = Loggers.getLogger(RestTestExecutionContext.class); + private static final ESLogger logger = Loggers.getLogger(ClientYamlTestExecutionContext.class); private final Stash stash = new Stash(); - private final RestSpec restSpec; + private final ClientYamlSuiteRestSpec restSpec; - private RestTestClient restTestClient; + private ClientYamlTestClient restTestClient; - private RestTestResponse response; + private ClientYamlTestResponse response; - public RestTestExecutionContext(RestSpec restSpec) { + public ClientYamlTestExecutionContext(ClientYamlSuiteRestSpec restSpec) { this.restSpec = restSpec; } @@ -60,7 +60,7 @@ public class RestTestExecutionContext { * Calls an elasticsearch api with the parameters and request body provided as arguments. * Saves the obtained response in the execution context. */ - public RestTestResponse callApi(String apiName, Map params, List> bodies, + public ClientYamlTestResponse callApi(String apiName, Map params, List> bodies, Map headers) throws IOException { //makes a copy of the parameters before modifying them for this specific request HashMap requestParams = new HashMap<>(params); @@ -74,7 +74,7 @@ public class RestTestExecutionContext { try { response = callApiInternal(apiName, requestParams, body, headers); return response; - } catch(RestTestResponseException e) { + } catch(ClientYamlTestResponseException e) { response = e.getRestTestResponse(); throw e; } finally { @@ -103,7 +103,7 @@ public class RestTestExecutionContext { return XContentFactory.jsonBuilder().map(body).string(); } - private RestTestResponse callApiInternal(String apiName, Map params, String body, Map headers) + private ClientYamlTestResponse callApiInternal(String apiName, Map params, String body, Map headers) throws IOException { return restTestClient.callApi(apiName, params, body, headers); } @@ -119,7 +119,7 @@ public class 
RestTestExecutionContext { * Creates the embedded REST client when needed. Needs to be called before each test. */ public void initClient(RestClient client, List hosts) throws IOException { - restTestClient = new RestTestClient(restSpec, client, hosts); + restTestClient = new ClientYamlTestClient(restSpec, client, hosts); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java similarity index 83% rename from test/framework/src/main/java/org/elasticsearch/test/rest/ESClientYamlSuiteTestCase.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index 2611e20d447..ae1853ccec3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.test.rest; +package org.elasticsearch.test.rest.yaml; import com.carrotsearch.randomizedtesting.RandomizedTest; @@ -25,16 +25,17 @@ import org.apache.lucene.util.IOUtils; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.test.rest.parser.RestTestParseException; -import org.elasticsearch.test.rest.parser.RestTestSuiteParser; -import org.elasticsearch.test.rest.section.DoSection; -import org.elasticsearch.test.rest.section.ExecutableSection; -import org.elasticsearch.test.rest.section.RestTestSuite; -import org.elasticsearch.test.rest.section.SkipSection; -import org.elasticsearch.test.rest.section.TestSection; -import org.elasticsearch.test.rest.spec.RestApi; -import org.elasticsearch.test.rest.spec.RestSpec; -import org.elasticsearch.test.rest.support.FileUtils; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestSuiteParser; +import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; +import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; +import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSuite; +import org.elasticsearch.test.rest.yaml.section.DoSection; +import org.elasticsearch.test.rest.yaml.section.ExecutableSection; +import org.elasticsearch.test.rest.yaml.section.SkipSection; +import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection; +import org.elasticsearch.test.rest.yaml.support.FileUtils; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -97,12 +98,12 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { private static final String PATHS_SEPARATOR = "(? 
blacklistPathMatchers = new ArrayList<>(); - private static RestTestExecutionContext restTestExecutionContext; - private static RestTestExecutionContext adminExecutionContext; + private static ClientYamlTestExecutionContext restTestExecutionContext; + private static ClientYamlTestExecutionContext adminExecutionContext; - private final RestTestCandidate testCandidate; + private final ClientYamlTestCandidate testCandidate; - public ESClientYamlSuiteTestCase(RestTestCandidate testCandidate) { + public ESClientYamlSuiteTestCase(ClientYamlTestCandidate testCandidate) { this.testCandidate = testCandidate; String[] blacklist = resolvePathsProperty(REST_TESTS_BLACKLIST, null); for (String entry : blacklist) { @@ -117,34 +118,34 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { super.afterIfFailed(errors); } - public static Iterable createParameters(int id, int count) throws IOException, RestTestParseException { + public static Iterable createParameters(int id, int count) throws IOException, ClientYamlTestParseException { //parse tests only if rest test group is enabled, otherwise rest tests might not even be available on file system - List restTestCandidates = collectTestCandidates(id, count); + List restTestCandidates = collectTestCandidates(id, count); List objects = new ArrayList<>(); - for (RestTestCandidate restTestCandidate : restTestCandidates) { + for (ClientYamlTestCandidate restTestCandidate : restTestCandidates) { objects.add(new Object[]{restTestCandidate}); } return objects; } - private static List collectTestCandidates(int id, int count) throws RestTestParseException, IOException { - List testCandidates = new ArrayList<>(); + private static List collectTestCandidates(int id, int count) throws ClientYamlTestParseException, IOException { + List testCandidates = new ArrayList<>(); FileSystem fileSystem = getFileSystem(); // don't make a try-with, getFileSystem returns null // ... 
and you can't close() the default filesystem try { String[] paths = resolvePathsProperty(REST_TESTS_SUITE, DEFAULT_TESTS_PATH); Map> yamlSuites = FileUtils.findYamlSuites(fileSystem, DEFAULT_TESTS_PATH, paths); - RestTestSuiteParser restTestSuiteParser = new RestTestSuiteParser(); + ClientYamlTestSuiteParser restTestSuiteParser = new ClientYamlTestSuiteParser(); //yaml suites are grouped by directory (effectively by api) for (String api : yamlSuites.keySet()) { List yamlFiles = new ArrayList<>(yamlSuites.get(api)); for (Path yamlFile : yamlFiles) { String key = api + yamlFile.getFileName().toString(); if (mustExecute(key, id, count)) { - RestTestSuite restTestSuite = restTestSuiteParser.parse(api, yamlFile); - for (TestSection testSection : restTestSuite.getTestSections()) { - testCandidates.add(new RestTestCandidate(restTestSuite, testSection)); + ClientYamlTestSuite restTestSuite = restTestSuiteParser.parse(api, yamlFile); + for (ClientYamlTestSection testSection : restTestSuite.getTestSections()) { + testCandidates.add(new ClientYamlTestCandidate(restTestSuite, testSection)); } } } @@ -154,9 +155,9 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { } //sort the candidates so they will always be in the same order before being shuffled, for repeatability - Collections.sort(testCandidates, new Comparator() { + Collections.sort(testCandidates, new Comparator() { @Override - public int compare(RestTestCandidate o1, RestTestCandidate o2) { + public int compare(ClientYamlTestCandidate o1, ClientYamlTestCandidate o2) { return o1.getTestPath().compareTo(o2.getTestPath()); } }); @@ -209,29 +210,29 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { @BeforeClass public static void initExecutionContext() throws IOException { String[] specPaths = resolvePathsProperty(REST_TESTS_SPEC, DEFAULT_SPEC_PATH); - RestSpec restSpec = null; + ClientYamlSuiteRestSpec restSpec = null; FileSystem fileSystem = getFileSystem(); // don't make 
a try-with, getFileSystem returns null // ... and you can't close() the default filesystem try { - restSpec = RestSpec.parseFrom(fileSystem, DEFAULT_SPEC_PATH, specPaths); + restSpec = ClientYamlSuiteRestSpec.parseFrom(fileSystem, DEFAULT_SPEC_PATH, specPaths); } finally { IOUtils.close(fileSystem); } validateSpec(restSpec); - restTestExecutionContext = new RestTestExecutionContext(restSpec); - adminExecutionContext = new RestTestExecutionContext(restSpec); + restTestExecutionContext = new ClientYamlTestExecutionContext(restSpec); + adminExecutionContext = new ClientYamlTestExecutionContext(restSpec); } - protected RestTestExecutionContext getAdminExecutionContext() { + protected ClientYamlTestExecutionContext getAdminExecutionContext() { return adminExecutionContext; } - private static void validateSpec(RestSpec restSpec) { + private static void validateSpec(ClientYamlSuiteRestSpec restSpec) { boolean validateSpec = RandomizedTest.systemPropertyAsBoolean(REST_TESTS_VALIDATE_SPEC, true); if (validateSpec) { StringBuilder errorMessage = new StringBuilder(); - for (RestApi restApi : restSpec.getApis()) { + for (ClientYamlSuiteRestApi restApi : restSpec.getApis()) { if (restApi.getMethods().contains("GET") && restApi.isBodySupported()) { if (!restApi.getMethods().contains("POST")) { errorMessage.append("\n- ").append(restApi.getName()).append(" supports GET with a body but doesn't support POST"); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ObjectPath.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ObjectPath.java similarity index 99% rename from test/framework/src/main/java/org/elasticsearch/test/rest/ObjectPath.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ObjectPath.java index 8c492d279b0..6311944fdcb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ObjectPath.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ObjectPath.java @@ -16,7 +16,7 @@ * 
specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest; +package org.elasticsearch.test.rest.yaml; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentParser; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Stash.java similarity index 99% rename from test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Stash.java index f687f2b39bf..dff1e59762e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Stash.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.test.rest; +package org.elasticsearch.test.rest.yaml; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.ESLogger; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/client/ClientYamlTestClient.java similarity index 84% rename from test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/client/ClientYamlTestClient.java index 2fb0374d048..c21e905ed5c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/client/ClientYamlTestClient.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.client; +package org.elasticsearch.test.rest.yaml.client; import com.carrotsearch.randomizedtesting.RandomizedTest; @@ -34,8 +34,10 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.test.rest.spec.RestApi; -import org.elasticsearch.test.rest.spec.RestSpec; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; +import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestPath; +import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; import java.io.IOException; import java.net.URI; @@ -49,20 +51,20 @@ import java.util.Objects; import java.util.Set; /** - * REST client used to test the elasticsearch REST layer. - * Wraps a {@link RestClient} instance used to send the REST requests. - * Holds the {@link RestSpec} used to translate api calls into REST calls + * Used by {@link ESClientYamlSuiteTestCase} to execute REST requests according to the tests written in yaml suite files. Wraps a + * {@link RestClient} instance used to send the REST requests. Holds the {@link ClientYamlSuiteRestSpec} used to translate api calls into + * REST calls. 
*/ -public class RestTestClient { - private static final ESLogger logger = Loggers.getLogger(RestTestClient.class); +public class ClientYamlTestClient { + private static final ESLogger logger = Loggers.getLogger(ClientYamlTestClient.class); //query_string params that don't need to be declared in the spec, they are supported by default private static final Set ALWAYS_ACCEPTED_QUERY_STRING_PARAMS = Sets.newHashSet("pretty", "source", "filter_path"); - private final RestSpec restSpec; + private final ClientYamlSuiteRestSpec restSpec; private final RestClient restClient; private final Version esVersion; - public RestTestClient(RestSpec restSpec, RestClient restClient, List hosts) throws IOException { + public ClientYamlTestClient(ClientYamlSuiteRestSpec restSpec, RestClient restClient, List hosts) throws IOException { assert hosts.size() > 0; this.restSpec = restSpec; this.restClient = restClient; @@ -70,7 +72,7 @@ public class RestTestClient { } private Version readAndCheckVersion(List hosts) throws IOException { - RestApi restApi = restApi("info"); + ClientYamlSuiteRestApi restApi = restApi("info"); assert restApi.getPaths().size() == 1; assert restApi.getMethods().size() == 1; @@ -80,7 +82,7 @@ public class RestTestClient { String method = restApi.getMethods().get(0); String endpoint = restApi.getPaths().get(0); Response response = restClient.performRequest(method, endpoint); - RestTestResponse restTestResponse = new RestTestResponse(response); + ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response); Object latestVersion = restTestResponse.evaluate("version.number"); if (latestVersion == null) { throw new RuntimeException("elasticsearch version not found in the response"); @@ -103,7 +105,7 @@ public class RestTestClient { /** * Calls an api with the provided parameters and body */ - public RestTestResponse callApi(String apiName, Map params, String body, Map headers) + public ClientYamlTestResponse callApi(String apiName, Map params, String 
body, Map headers) throws IOException { if ("raw".equals(apiName)) { @@ -118,9 +120,9 @@ public class RestTestClient { // And everything else is a url parameter! try { Response response = restClient.performRequest(method, path, queryStringParams, entity); - return new RestTestResponse(response); + return new ClientYamlTestResponse(response); } catch(ResponseException e) { - throw new RestTestResponseException(e); + throw new ClientYamlTestResponseException(e); } } @@ -146,7 +148,7 @@ public class RestTestClient { //create doesn't exist in the spec but is supported in the clients (index with op_type=create) boolean indexCreateApi = "create".equals(apiName); String api = indexCreateApi ? "index" : apiName; - RestApi restApi = restApi(api); + ClientYamlSuiteRestApi restApi = restApi(api); //divide params between ones that go within query string and ones that go within path Map pathParts = new HashMap<>(); @@ -192,7 +194,7 @@ public class RestTestClient { } //the rest path to use is randomized out of the matching ones (if more than one) - RestPath restPath = RandomizedTest.randomFrom(restApi.getFinalPaths(pathParts)); + ClientYamlSuiteRestPath restPath = RandomizedTest.randomFrom(restApi.getFinalPaths(pathParts)); //Encode rules for path and query string parameters are different. We use URI to encode the path. //We need to encode each path part separately, as each one might contain slashes that need to be escaped, which needs to //be done manually. 
@@ -225,17 +227,17 @@ public class RestTestClient { logger.debug("calling api [{}]", apiName); try { Response response = restClient.performRequest(requestMethod, requestPath, queryStringParams, requestBody, requestHeaders); - return new RestTestResponse(response); + return new ClientYamlTestResponse(response); } catch(ResponseException e) { if (ignores.contains(e.getResponse().getStatusLine().getStatusCode())) { - return new RestTestResponse(e.getResponse()); + return new ClientYamlTestResponse(e.getResponse()); } - throw new RestTestResponseException(e); + throw new ClientYamlTestResponseException(e); } } - private RestApi restApi(String apiName) { - RestApi restApi = restSpec.getApi(apiName); + private ClientYamlSuiteRestApi restApi(String apiName) { + ClientYamlSuiteRestApi restApi = restSpec.getApi(apiName); if (restApi == null) { throw new IllegalArgumentException("rest api [" + apiName + "] doesn't exist in the rest spec"); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/client/ClientYamlTestResponse.java similarity index 94% rename from test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/client/ClientYamlTestResponse.java index 9149824c33c..9ccdd89592f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/client/ClientYamlTestResponse.java @@ -16,14 +16,14 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.client; +package org.elasticsearch.test.rest.yaml.client; import org.apache.http.client.methods.HttpHead; import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Response; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.test.rest.ObjectPath; -import org.elasticsearch.test.rest.Stash; +import org.elasticsearch.test.rest.yaml.ObjectPath; +import org.elasticsearch.test.rest.yaml.Stash; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -32,13 +32,13 @@ import java.nio.charset.StandardCharsets; * Response obtained from a REST call, eagerly reads the response body into a string for later optional parsing. * Supports parsing the response body when needed and returning specific values extracted from it. */ -public class RestTestResponse { +public class ClientYamlTestResponse { private final Response response; private final String body; private ObjectPath parsedResponse; - RestTestResponse(Response response) throws IOException { + ClientYamlTestResponse(Response response) throws IOException { this.response = response; if (response.getEntity() != null) { try { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponseException.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/client/ClientYamlTestResponseException.java similarity index 80% rename from test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponseException.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/client/ClientYamlTestResponseException.java index 2fc93a91088..73719202c10 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponseException.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/client/ClientYamlTestResponseException.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.test.rest.client; +package org.elasticsearch.test.rest.yaml.client; import org.elasticsearch.client.ResponseException; @@ -27,21 +27,21 @@ import java.io.IOException; * Exception obtained from a REST call in case the response code indicated an error. Eagerly reads the response body into a string * for later optional parsing. Supports parsing the response body when needed and returning specific values extracted from it. */ -public class RestTestResponseException extends IOException { +public class ClientYamlTestResponseException extends IOException { - private final RestTestResponse restTestResponse; + private final ClientYamlTestResponse restTestResponse; private final ResponseException responseException; - RestTestResponseException(ResponseException responseException) throws IOException { + ClientYamlTestResponseException(ResponseException responseException) throws IOException { super(responseException); this.responseException = responseException; - this.restTestResponse = new RestTestResponse(responseException.getResponse()); + this.restTestResponse = new ClientYamlTestResponse(responseException.getResponse()); } /** * Exposes the obtained response body */ - public RestTestResponse getRestTestResponse() { + public ClientYamlTestResponse getRestTestResponse() { return restTestResponse; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestFragmentParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlTestFragmentParser.java similarity index 76% rename from test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestFragmentParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlTestFragmentParser.java index 8d2bd8be76f..390ac1ce366 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestFragmentParser.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlTestFragmentParser.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.parser; +package org.elasticsearch.test.rest.yaml.parser; import java.io.IOException; @@ -24,10 +24,10 @@ import java.io.IOException; * Base parser for a REST test suite fragment * @param the test fragment's type that gets parsed and returned */ -public interface RestTestFragmentParser { +public interface ClientYamlTestFragmentParser { /** - * Parses a test fragment given the current {@link RestTestSuiteParseContext} + * Parses a test fragment given the current {@link ClientYamlTestSuiteParseContext} */ - T parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException; + T parse(ClientYamlTestSuiteParseContext parseContext) throws IOException, ClientYamlTestParseException; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestParseException.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlTestParseException.java similarity index 80% rename from test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestParseException.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlTestParseException.java index 3e1af2cd749..594f701c79a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestParseException.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlTestParseException.java @@ -16,18 +16,18 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.parser; +package org.elasticsearch.test.rest.yaml.parser; /** * Exception thrown whenever there is a problem parsing any of the REST test suite fragment */ -public class RestTestParseException extends Exception { +public class ClientYamlTestParseException extends Exception { - RestTestParseException(String message) { + ClientYamlTestParseException(String message) { super(message); } - RestTestParseException(String message, Throwable cause) { + ClientYamlTestParseException(String message, Throwable cause) { super(message, cause); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSectionParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlTestSectionParser.java similarity index 74% rename from test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSectionParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlTestSectionParser.java index e1d2011e231..b6e8ad6c0f4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSectionParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlTestSectionParser.java @@ -16,23 +16,23 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.parser; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.test.rest.section.TestSection; +import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection; import java.io.IOException; /** * Parser for a complete test section */ -public class RestTestSectionParser implements RestTestFragmentParser { +public class ClientYamlTestSectionParser implements ClientYamlTestFragmentParser { @Override - public TestSection parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { + public ClientYamlTestSection parse(ClientYamlTestSuiteParseContext parseContext) throws IOException, ClientYamlTestParseException { XContentParser parser = parseContext.parser(); parseContext.advanceToFieldName(); - TestSection testSection = new TestSection(parser.currentName()); + ClientYamlTestSection testSection = new ClientYamlTestSection(parser.currentName()); try { parser.nextToken(); testSection.setSkipSection(parseContext.parseSkipSection()); @@ -48,7 +48,7 @@ public class RestTestSectionParser implements RestTestFragmentParser> EXECUTABLE_SECTIONS_PARSERS = new HashMap<>(); + private static final Map> EXECUTABLE_SECTIONS_PARSERS = + new HashMap<>(); static { EXECUTABLE_SECTIONS_PARSERS.put("do", DO_SECTION_PARSER); EXECUTABLE_SECTIONS_PARSERS.put("set", new SetSectionParser()); @@ -62,7 +62,7 @@ public class RestTestSuiteParseContext { private final String suiteName; private final XContentParser parser; - public RestTestSuiteParseContext(String api, String suiteName, XContentParser parser) { + public ClientYamlTestSuiteParseContext(String api, String suiteName, XContentParser parser) { this.api = api; this.suiteName = suiteName; this.parser = parser; @@ -80,7 +80,7 @@ public class RestTestSuiteParseContext { return parser; } - public SetupSection parseSetupSection() throws IOException, RestTestParseException { + public SetupSection 
parseSetupSection() throws IOException, ClientYamlTestParseException { advanceToFieldName(); @@ -94,7 +94,7 @@ public class RestTestSuiteParseContext { return SetupSection.EMPTY; } - public TeardownSection parseTeardownSection() throws IOException, RestTestParseException { + public TeardownSection parseTeardownSection() throws IOException, ClientYamlTestParseException { advanceToFieldName(); if ("teardown".equals(parser.currentName())) { @@ -107,11 +107,11 @@ public class RestTestSuiteParseContext { return TeardownSection.EMPTY; } - public TestSection parseTestSection() throws IOException, RestTestParseException { + public ClientYamlTestSection parseTestSection() throws IOException, ClientYamlTestParseException { return TEST_SECTION_PARSER.parse(this); } - public SkipSection parseSkipSection() throws IOException, RestTestParseException { + public SkipSection parseSkipSection() throws IOException, ClientYamlTestParseException { advanceToFieldName(); @@ -124,12 +124,12 @@ public class RestTestSuiteParseContext { return SkipSection.EMPTY; } - public ExecutableSection parseExecutableSection() throws IOException, RestTestParseException { + public ExecutableSection parseExecutableSection() throws IOException, ClientYamlTestParseException { advanceToFieldName(); String section = parser.currentName(); - RestTestFragmentParser execSectionParser = EXECUTABLE_SECTIONS_PARSERS.get(section); + ClientYamlTestFragmentParser execSectionParser = EXECUTABLE_SECTIONS_PARSERS.get(section); if (execSectionParser == null) { - throw new RestTestParseException("no parser found for executable section [" + section + "]"); + throw new ClientYamlTestParseException("no parser found for executable section [" + section + "]"); } XContentLocation location = parser.getTokenLocation(); try { @@ -141,11 +141,11 @@ public class RestTestSuiteParseContext { } } - public DoSection parseDoSection() throws IOException, RestTestParseException { + public DoSection parseDoSection() throws IOException, 
ClientYamlTestParseException { return DO_SECTION_PARSER.parse(this); } - public void advanceToFieldName() throws IOException, RestTestParseException { + public void advanceToFieldName() throws IOException, ClientYamlTestParseException { XContentParser.Token token = parser.currentToken(); //we are in the beginning, haven't called nextToken yet if (token == null) { @@ -158,12 +158,12 @@ public class RestTestSuiteParseContext { token = parser.nextToken(); } if (token != XContentParser.Token.FIELD_NAME) { - throw new RestTestParseException("malformed test section: field name expected but found " + token + " at " + throw new ClientYamlTestParseException("malformed test section: field name expected but found " + token + " at " + parser.getTokenLocation()); } } - public String parseField() throws IOException, RestTestParseException { + public String parseField() throws IOException, ClientYamlTestParseException { parser.nextToken(); assert parser.currentToken().isValue(); String field = parser.text(); @@ -171,7 +171,7 @@ public class RestTestSuiteParseContext { return field; } - public Tuple parseTuple() throws IOException, RestTestParseException { + public Tuple parseTuple() throws IOException, ClientYamlTestParseException { parser.nextToken(); advanceToFieldName(); Map map = parser.map(); @@ -179,7 +179,7 @@ public class RestTestSuiteParseContext { parser.nextToken(); if (map.size() != 1) { - throw new RestTestParseException("expected key value pair but found " + map.size() + " "); + throw new ClientYamlTestParseException("expected key value pair but found " + map.size() + " "); } Map.Entry entry = map.entrySet().iterator().next(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlTestSuiteParser.java similarity index 72% rename from test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java rename to 
test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlTestSuiteParser.java index f22f0109594..65277888471 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlTestSuiteParser.java @@ -16,13 +16,12 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.parser; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.yaml.YamlXContent; -import org.elasticsearch.test.rest.section.RestTestSuite; -import org.elasticsearch.test.rest.section.TeardownSection; -import org.elasticsearch.test.rest.section.TestSection; +import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSuite; +import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection; import java.io.IOException; import java.nio.ByteBuffer; @@ -34,9 +33,9 @@ import java.nio.file.StandardOpenOption; /** * Parser for a complete test suite (yaml file) */ -public class RestTestSuiteParser implements RestTestFragmentParser { +public class ClientYamlTestSuiteParser implements ClientYamlTestFragmentParser { - public RestTestSuite parse(String api, Path file) throws IOException, RestTestParseException { + public ClientYamlTestSuite parse(String api, Path file) throws IOException, ClientYamlTestParseException { if (!Files.isRegularFile(file)) { throw new IllegalArgumentException(file.toAbsolutePath() + " is not a file"); @@ -54,27 +53,27 @@ public class RestTestSuiteParser implements RestTestFragmentParser { +public class DoSectionParser implements ClientYamlTestFragmentParser { @Override - public DoSection parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { + public DoSection parse(ClientYamlTestSuiteParseContext parseContext) throws IOException, 
ClientYamlTestParseException { XContentParser parser = parseContext.parser(); @@ -91,7 +91,7 @@ public class DoSectionParser implements RestTestFragmentParser { } try { if (apiCallSection == null) { - throw new RestTestParseException("client call section is mandatory within a do section"); + throw new ClientYamlTestParseException("client call section is mandatory within a do section"); } if (headers.isEmpty() == false) { apiCallSection.addHeaders(headers); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/GreaterThanEqualToParser.java similarity index 70% rename from test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/GreaterThanEqualToParser.java index 7a4cd0f316a..1ed71075970 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/GreaterThanEqualToParser.java @@ -17,23 +17,23 @@ * under the License. 
*/ -package org.elasticsearch.test.rest.parser; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.test.rest.section.GreaterThanEqualToAssertion; +import org.elasticsearch.test.rest.yaml.section.GreaterThanEqualToAssertion; import java.io.IOException; /** * Parser for gte assert sections */ -public class GreaterThanEqualToParser implements RestTestFragmentParser { - +public class GreaterThanEqualToParser implements ClientYamlTestFragmentParser { @Override - public GreaterThanEqualToAssertion parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { + public GreaterThanEqualToAssertion parse(ClientYamlTestSuiteParseContext parseContext) + throws IOException, ClientYamlTestParseException { Tuple stringObjectTuple = parseContext.parseTuple(); if (! (stringObjectTuple.v2() instanceof Comparable) ) { - throw new RestTestParseException("gte section can only be used with objects that support natural ordering, found " + throw new ClientYamlTestParseException("gte section can only be used with objects that support natural ordering, found " + stringObjectTuple.v2().getClass().getSimpleName()); } return new GreaterThanEqualToAssertion(stringObjectTuple.v1(), stringObjectTuple.v2()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/GreaterThanParser.java similarity index 72% rename from test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/GreaterThanParser.java index 7e1ca1ece7f..ca76d486ab4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/GreaterThanParser.java @@ -16,23 +16,23 @@ * specific language governing 
permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.parser; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.test.rest.section.GreaterThanAssertion; +import org.elasticsearch.test.rest.yaml.section.GreaterThanAssertion; import java.io.IOException; /** * Parser for gt assert sections */ -public class GreaterThanParser implements RestTestFragmentParser { +public class GreaterThanParser implements ClientYamlTestFragmentParser { @Override - public GreaterThanAssertion parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { + public GreaterThanAssertion parse(ClientYamlTestSuiteParseContext parseContext) throws IOException, ClientYamlTestParseException { Tuple stringObjectTuple = parseContext.parseTuple(); if (! (stringObjectTuple.v2() instanceof Comparable) ) { - throw new RestTestParseException("gt section can only be used with objects that support natural ordering, found " + throw new ClientYamlTestParseException("gt section can only be used with objects that support natural ordering, found " + stringObjectTuple.v2().getClass().getSimpleName()); } return new GreaterThanAssertion(stringObjectTuple.v1(), stringObjectTuple.v2()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/IsFalseParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/IsFalseParser.java similarity index 74% rename from test/framework/src/main/java/org/elasticsearch/test/rest/parser/IsFalseParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/IsFalseParser.java index 81cade6d84b..8a41df99002 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/IsFalseParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/IsFalseParser.java @@ -16,19 +16,19 @@ * specific language governing permissions and limitations * under the 
License. */ -package org.elasticsearch.test.rest.parser; +package org.elasticsearch.test.rest.yaml.parser; -import org.elasticsearch.test.rest.section.IsFalseAssertion; +import org.elasticsearch.test.rest.yaml.section.IsFalseAssertion; import java.io.IOException; /** * Parser for is_false assert sections */ -public class IsFalseParser implements RestTestFragmentParser { +public class IsFalseParser implements ClientYamlTestFragmentParser { @Override - public IsFalseAssertion parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { + public IsFalseAssertion parse(ClientYamlTestSuiteParseContext parseContext) throws IOException, ClientYamlTestParseException { return new IsFalseAssertion(parseContext.parseField()); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/IsTrueParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/IsTrueParser.java similarity index 74% rename from test/framework/src/main/java/org/elasticsearch/test/rest/parser/IsTrueParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/IsTrueParser.java index 922629b47ef..228cc29c262 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/IsTrueParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/IsTrueParser.java @@ -16,19 +16,19 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.parser; +package org.elasticsearch.test.rest.yaml.parser; -import org.elasticsearch.test.rest.section.IsTrueAssertion; +import org.elasticsearch.test.rest.yaml.section.IsTrueAssertion; import java.io.IOException; /** * Parser for is_true assert sections */ -public class IsTrueParser implements RestTestFragmentParser { +public class IsTrueParser implements ClientYamlTestFragmentParser { @Override - public IsTrueAssertion parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { + public IsTrueAssertion parse(ClientYamlTestSuiteParseContext parseContext) throws IOException, ClientYamlTestParseException { return new IsTrueAssertion(parseContext.parseField()); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LengthParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/LengthParser.java similarity index 77% rename from test/framework/src/main/java/org/elasticsearch/test/rest/parser/LengthParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/LengthParser.java index 414be59f4cd..8a206130cb5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LengthParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/LengthParser.java @@ -16,20 +16,20 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.parser; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.test.rest.section.LengthAssertion; +import org.elasticsearch.test.rest.yaml.section.LengthAssertion; import java.io.IOException; /** * Parser for length assert sections */ -public class LengthParser implements RestTestFragmentParser { +public class LengthParser implements ClientYamlTestFragmentParser { @Override - public LengthAssertion parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { + public LengthAssertion parse(ClientYamlTestSuiteParseContext parseContext) throws IOException, ClientYamlTestParseException { Tuple stringObjectTuple = parseContext.parseTuple(); assert stringObjectTuple.v2() != null; int value; @@ -39,7 +39,7 @@ public class LengthParser implements RestTestFragmentParser { try { value = Integer.valueOf(stringObjectTuple.v2().toString()); } catch(NumberFormatException e) { - throw new RestTestParseException("length is not a valid number", e); + throw new ClientYamlTestParseException("length is not a valid number", e); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/LessThanOrEqualToParser.java similarity index 71% rename from test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/LessThanOrEqualToParser.java index a30979c6a3c..9fc43212548 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/LessThanOrEqualToParser.java @@ -17,23 +17,23 @@ * under the License. 
*/ -package org.elasticsearch.test.rest.parser; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.test.rest.section.LessThanOrEqualToAssertion; +import org.elasticsearch.test.rest.yaml.section.LessThanOrEqualToAssertion; import java.io.IOException; /** * Parser for lte assert section */ -public class LessThanOrEqualToParser implements RestTestFragmentParser { +public class LessThanOrEqualToParser implements ClientYamlTestFragmentParser { @Override - public LessThanOrEqualToAssertion parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { + public LessThanOrEqualToAssertion parse(ClientYamlTestSuiteParseContext parseContext) throws IOException, ClientYamlTestParseException { Tuple stringObjectTuple = parseContext.parseTuple(); if (! (stringObjectTuple.v2() instanceof Comparable) ) { - throw new RestTestParseException("lte section can only be used with objects that support natural ordering, found " + throw new ClientYamlTestParseException("lte section can only be used with objects that support natural ordering, found " + stringObjectTuple.v2().getClass().getSimpleName()); } return new LessThanOrEqualToAssertion(stringObjectTuple.v1(), stringObjectTuple.v2()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/LessThanParser.java similarity index 72% rename from test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/LessThanParser.java index fc31f221758..f244bd68f93 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/LessThanParser.java @@ -16,23 +16,23 @@ * specific language governing permissions and limitations * under 
the License. */ -package org.elasticsearch.test.rest.parser; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.test.rest.section.LessThanAssertion; +import org.elasticsearch.test.rest.yaml.section.LessThanAssertion; import java.io.IOException; /** * Parser for lt assert sections */ -public class LessThanParser implements RestTestFragmentParser { +public class LessThanParser implements ClientYamlTestFragmentParser { @Override - public LessThanAssertion parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { + public LessThanAssertion parse(ClientYamlTestSuiteParseContext parseContext) throws IOException, ClientYamlTestParseException { Tuple stringObjectTuple = parseContext.parseTuple(); if (! (stringObjectTuple.v2() instanceof Comparable) ) { - throw new RestTestParseException("lt section can only be used with objects that support natural ordering, found " + throw new ClientYamlTestParseException("lt section can only be used with objects that support natural ordering, found " + stringObjectTuple.v2().getClass().getSimpleName()); } return new LessThanAssertion(stringObjectTuple.v1(), stringObjectTuple.v2()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/MatchParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/MatchParser.java similarity index 77% rename from test/framework/src/main/java/org/elasticsearch/test/rest/parser/MatchParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/MatchParser.java index 30ee18a4e05..35f3fc160bf 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/MatchParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/MatchParser.java @@ -16,20 +16,20 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.parser; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.test.rest.section.MatchAssertion; +import org.elasticsearch.test.rest.yaml.section.MatchAssertion; import java.io.IOException; /** * Parser for match assert sections */ -public class MatchParser implements RestTestFragmentParser { +public class MatchParser implements ClientYamlTestFragmentParser { @Override - public MatchAssertion parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { + public MatchAssertion parse(ClientYamlTestSuiteParseContext parseContext) throws IOException, ClientYamlTestParseException { Tuple stringObjectTuple = parseContext.parseTuple(); return new MatchAssertion(stringObjectTuple.v1(), stringObjectTuple.v2()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/SetSectionParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/SetSectionParser.java similarity index 79% rename from test/framework/src/main/java/org/elasticsearch/test/rest/parser/SetSectionParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/SetSectionParser.java index 8afafc09f7a..2686593e10a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/SetSectionParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/SetSectionParser.java @@ -16,20 +16,20 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.parser; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.test.rest.section.SetSection; +import org.elasticsearch.test.rest.yaml.section.SetSection; import java.io.IOException; /** * Parser for set sections */ -public class SetSectionParser implements RestTestFragmentParser { +public class SetSectionParser implements ClientYamlTestFragmentParser { @Override - public SetSection parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { + public SetSection parse(ClientYamlTestSuiteParseContext parseContext) throws IOException, ClientYamlTestParseException { XContentParser parser = parseContext.parser(); @@ -49,7 +49,7 @@ public class SetSectionParser implements RestTestFragmentParser { parser.nextToken(); if (setSection.getStash().isEmpty()) { - throw new RestTestParseException("set section must set at least a value"); + throw new ClientYamlTestParseException("set section must set at least a value"); } return setSection; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/SetupSectionParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/SetupSectionParser.java similarity index 76% rename from test/framework/src/main/java/org/elasticsearch/test/rest/parser/SetupSectionParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/SetupSectionParser.java index 2a2e39ea744..e62b3af5251 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/SetupSectionParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/SetupSectionParser.java @@ -16,20 +16,20 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.parser; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.test.rest.section.SetupSection; +import org.elasticsearch.test.rest.yaml.section.SetupSection; import java.io.IOException; /** * Parser for setup sections */ -public class SetupSectionParser implements RestTestFragmentParser { +public class SetupSectionParser implements ClientYamlTestFragmentParser { @Override - public SetupSection parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { + public SetupSection parse(ClientYamlTestSuiteParseContext parseContext) throws IOException, ClientYamlTestParseException { XContentParser parser = parseContext.parser(); @@ -39,7 +39,7 @@ public class SetupSectionParser implements RestTestFragmentParser while (parser.currentToken() != XContentParser.Token.END_ARRAY) { parseContext.advanceToFieldName(); if (!"do".equals(parser.currentName())) { - throw new RestTestParseException("section [" + parser.currentName() + "] not supported within setup section"); + throw new ClientYamlTestParseException("section [" + parser.currentName() + "] not supported within setup section"); } parser.nextToken(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/SkipSectionParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/SkipSectionParser.java similarity index 77% rename from test/framework/src/main/java/org/elasticsearch/test/rest/parser/SkipSectionParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/SkipSectionParser.java index 33733821019..31451dee247 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/SkipSectionParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/SkipSectionParser.java @@ -16,11 +16,11 @@ * specific language governing permissions and limitations * under the 
License. */ -package org.elasticsearch.test.rest.parser; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.test.rest.section.SkipSection; +import org.elasticsearch.test.rest.yaml.section.SkipSection; import java.io.IOException; import java.util.ArrayList; @@ -29,10 +29,10 @@ import java.util.List; /** * Parser for skip sections */ -public class SkipSectionParser implements RestTestFragmentParser { +public class SkipSectionParser implements ClientYamlTestFragmentParser { @Override - public SkipSection parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { + public SkipSection parse(ClientYamlTestSuiteParseContext parseContext) throws IOException, ClientYamlTestParseException { XContentParser parser = parseContext.parser(); @@ -54,7 +54,7 @@ public class SkipSectionParser implements RestTestFragmentParser { features.add(parser.text()); } else { - throw new RestTestParseException("field " + currentFieldName + " not supported within skip section"); + throw new ClientYamlTestParseException("field " + currentFieldName + " not supported within skip section"); } } else if (token == XContentParser.Token.START_ARRAY) { if ("features".equals(currentFieldName)) { @@ -68,13 +68,13 @@ public class SkipSectionParser implements RestTestFragmentParser { parser.nextToken(); if (!Strings.hasLength(version) && features.isEmpty()) { - throw new RestTestParseException("version or features is mandatory within skip section"); + throw new ClientYamlTestParseException("version or features is mandatory within skip section"); } if (Strings.hasLength(version) && !features.isEmpty()) { - throw new RestTestParseException("version or features are mutually exclusive"); + throw new ClientYamlTestParseException("version or features are mutually exclusive"); } if (Strings.hasLength(version) && !Strings.hasLength(reason)) { - throw new 
RestTestParseException("reason is mandatory within skip version section"); + throw new ClientYamlTestParseException("reason is mandatory within skip version section"); } return new SkipSection(version, features, reason); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/TeardownSectionParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/TeardownSectionParser.java similarity index 76% rename from test/framework/src/main/java/org/elasticsearch/test/rest/parser/TeardownSectionParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/TeardownSectionParser.java index 428a21e2e06..ed1b42c3a9c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/TeardownSectionParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/parser/TeardownSectionParser.java @@ -17,20 +17,20 @@ * under the License. */ -package org.elasticsearch.test.rest.parser; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.test.rest.section.TeardownSection; +import org.elasticsearch.test.rest.yaml.section.TeardownSection; import java.io.IOException; /** * Parser for teardown section */ -public class TeardownSectionParser implements RestTestFragmentParser { +public class TeardownSectionParser implements ClientYamlTestFragmentParser { @Override - public TeardownSection parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { + public TeardownSection parse(ClientYamlTestSuiteParseContext parseContext) throws IOException, ClientYamlTestParseException { XContentParser parser = parseContext.parser(); TeardownSection teardownSection = new TeardownSection(); @@ -39,7 +39,7 @@ public class TeardownSectionParser implements RestTestFragmentParser paths = new ArrayList<>(); private List pathParts = new ArrayList<>(); private List params = new ArrayList<>(); - private BODY 
body = BODY.NOT_SUPPORTED; + private Body body = Body.NOT_SUPPORTED; - public enum BODY { + public enum Body { NOT_SUPPORTED, OPTIONAL, REQUIRED } - RestApi(String location, String name) { + ClientYamlSuiteRestApi(String location, String name) { this.location = location; this.name = name; } @@ -116,34 +115,34 @@ public class RestApi { } void setBodyOptional() { - this.body = BODY.OPTIONAL; + this.body = Body.OPTIONAL; } void setBodyRequired() { - this.body = BODY.REQUIRED; + this.body = Body.REQUIRED; } public boolean isBodySupported() { - return body != BODY.NOT_SUPPORTED; + return body != Body.NOT_SUPPORTED; } public boolean isBodyRequired() { - return body == BODY.REQUIRED; + return body == Body.REQUIRED; } /** * Finds the best matching rest path given the current parameters and replaces * placeholders with their corresponding values received as arguments */ - public RestPath[] getFinalPaths(Map pathParams) { - List matchingRestPaths = findMatchingRestPaths(pathParams.keySet()); + public ClientYamlSuiteRestPath[] getFinalPaths(Map pathParams) { + List matchingRestPaths = findMatchingRestPaths(pathParams.keySet()); if (matchingRestPaths == null || matchingRestPaths.isEmpty()) { throw new IllegalArgumentException("unable to find matching rest path for api [" + name + "] and path params " + pathParams); } - RestPath[] restPaths = new RestPath[matchingRestPaths.size()]; + ClientYamlSuiteRestPath[] restPaths = new ClientYamlSuiteRestPath[matchingRestPaths.size()]; for (int i = 0; i < matchingRestPaths.size(); i++) { - RestPath restPath = matchingRestPaths.get(i); + ClientYamlSuiteRestPath restPath = matchingRestPaths.get(i); restPaths[i] = restPath.replacePlaceholders(pathParams); } return restPaths; @@ -155,11 +154,11 @@ public class RestApi { * The best path is the one that has exactly the same number of placeholders to replace * (e.g. /{index}/{type}/{id} when the path params are exactly index, type and id). 
*/ - private List findMatchingRestPaths(Set restParams) { + private List findMatchingRestPaths(Set restParams) { - List matchingRestPaths = new ArrayList<>(); - RestPath[] restPaths = buildRestPaths(); - for (RestPath restPath : restPaths) { + List matchingRestPaths = new ArrayList<>(); + ClientYamlSuiteRestPath[] restPaths = buildRestPaths(); + for (ClientYamlSuiteRestPath restPath : restPaths) { if (restPath.matches(restParams)) { matchingRestPaths.add(restPath); } @@ -167,10 +166,10 @@ public class RestApi { return matchingRestPaths; } - private RestPath[] buildRestPaths() { - RestPath[] restPaths = new RestPath[paths.size()]; + private ClientYamlSuiteRestPath[] buildRestPaths() { + ClientYamlSuiteRestPath[] restPaths = new ClientYamlSuiteRestPath[paths.size()]; for (int i = 0; i < restPaths.length; i++) { - restPaths[i] = new RestPath(paths.get(i)); + restPaths[i] = new ClientYamlSuiteRestPath(paths.get(i)); } return restPaths; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiParser.java similarity index 94% rename from test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiParser.java index 95fe132471a..178fb9c5f34 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiParser.java @@ -16,24 +16,24 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.spec; +package org.elasticsearch.test.rest.yaml.restspec; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; /** - * Parser for a REST api spec (single json file) + * Parser for a {@link ClientYamlSuiteRestApi}. 
*/ -public class RestApiParser { +public class ClientYamlSuiteRestApiParser { - public RestApi parse(String location, XContentParser parser) throws IOException { + public ClientYamlSuiteRestApi parse(String location, XContentParser parser) throws IOException { while ( parser.nextToken() != XContentParser.Token.FIELD_NAME ) { //move to first field name } - RestApi restApi = new RestApi(location, parser.currentName()); + ClientYamlSuiteRestApi restApi = new ClientYamlSuiteRestApi(location, parser.currentName()); int level = -1; while (parser.nextToken() != XContentParser.Token.END_OBJECT || level >= 0) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestPath.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestPath.java similarity index 90% rename from test/framework/src/main/java/org/elasticsearch/test/rest/client/RestPath.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestPath.java index ed1ce728c0b..f54994138e0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestPath.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestPath.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.test.rest.client; +package org.elasticsearch.test.rest.yaml.restspec; import java.util.ArrayList; import java.util.Collections; @@ -25,11 +25,11 @@ import java.util.List; import java.util.Map; import java.util.Set; -public class RestPath { +public class ClientYamlSuiteRestPath { private final List parts; private final List placeholders; - public RestPath(List parts) { + public ClientYamlSuiteRestPath(List parts) { List pathParts = new ArrayList<>(parts.size()); for (String part : parts) { pathParts.add(new PathPart(part, false)); @@ -38,7 +38,7 @@ public class RestPath { this.placeholders = Collections.emptyList(); } - public RestPath(String path) { + public ClientYamlSuiteRestPath(String path) { String[] pathParts = path.split("/"); List placeholders = new ArrayList<>(); List parts = new ArrayList<>(); @@ -73,7 +73,7 @@ public class RestPath { return placeholders.size() == params.size() && placeholders.containsAll(params); } - public RestPath replacePlaceholders(Map params) { + public ClientYamlSuiteRestPath replacePlaceholders(Map params) { List finalPathParts = new ArrayList<>(parts.size()); for (PathPart pathPart : parts) { if (pathPart.isPlaceholder) { @@ -86,7 +86,7 @@ public class RestPath { finalPathParts.add(pathPart.pathPart); } } - return new RestPath(finalPathParts); + return new ClientYamlSuiteRestPath(finalPathParts); } private static class PathPart { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java similarity index 72% rename from test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java index c6ea48fd6ef..d0cfc839fcc 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java @@ -16,11 +16,11 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.spec; +package org.elasticsearch.test.rest.yaml.restspec; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.test.rest.support.FileUtils; +import org.elasticsearch.test.rest.yaml.support.FileUtils; import java.io.IOException; import java.io.InputStream; @@ -32,41 +32,41 @@ import java.util.HashMap; import java.util.Map; /** - * Holds the elasticsearch REST spec + * Holds the specification used to turn {@code do} actions in the YAML suite into REST api calls. */ -public class RestSpec { - Map restApiMap = new HashMap<>(); +public class ClientYamlSuiteRestSpec { + Map restApiMap = new HashMap<>(); - private RestSpec() { + private ClientYamlSuiteRestSpec() { } - void addApi(RestApi restApi) { - RestApi previous = restApiMap.putIfAbsent(restApi.getName(), restApi); + void addApi(ClientYamlSuiteRestApi restApi) { + ClientYamlSuiteRestApi previous = restApiMap.putIfAbsent(restApi.getName(), restApi); if (previous != null) { throw new IllegalArgumentException("cannot register api [" + restApi.getName() + "] found in [" + restApi.getLocation() + "]. " + "api with same name was already found in [" + previous.getLocation() + "]"); } } - public RestApi getApi(String api) { + public ClientYamlSuiteRestApi getApi(String api) { return restApiMap.get(api); } - public Collection getApis() { + public Collection getApis() { return restApiMap.values(); } /** * Parses the complete set of REST spec available under the provided directories */ - public static RestSpec parseFrom(FileSystem fileSystem, String optionalPathPrefix, String... 
paths) throws IOException { - RestSpec restSpec = new RestSpec(); - RestApiParser restApiParser = new RestApiParser(); + public static ClientYamlSuiteRestSpec parseFrom(FileSystem fileSystem, String optionalPathPrefix, String... paths) throws IOException { + ClientYamlSuiteRestSpec restSpec = new ClientYamlSuiteRestSpec(); + ClientYamlSuiteRestApiParser restApiParser = new ClientYamlSuiteRestApiParser(); for (String path : paths) { for (Path jsonFile : FileUtils.findJsonSpec(fileSystem, optionalPathPrefix, path)) { try (InputStream stream = Files.newInputStream(jsonFile)) { try (XContentParser parser = JsonXContent.jsonXContent.createParser(stream)) { - RestApi restApi = restApiParser.parse(jsonFile.toString(), parser); + ClientYamlSuiteRestApi restApi = restApiParser.parse(jsonFile.toString(), parser); String filename = jsonFile.getFileName().toString(); String expectedApiName = filename.substring(0, filename.lastIndexOf('.')); if (restApi.getName().equals(expectedApiName) == false) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ApiCallSection.java similarity index 97% rename from test/framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ApiCallSection.java index 030469148ed..5d097f872b4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/ApiCallSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ApiCallSection.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.section; +package org.elasticsearch.test.rest.yaml.section; import java.util.ArrayList; import java.util.Collections; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/Assertion.java similarity index 86% rename from test/framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/Assertion.java index fbba9de163b..97ef2546074 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/Assertion.java @@ -16,9 +16,9 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.section; +package org.elasticsearch.test.rest.yaml.section; -import org.elasticsearch.test.rest.RestTestExecutionContext; +import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; import java.io.IOException; import java.util.Map; @@ -44,7 +44,7 @@ public abstract class Assertion implements ExecutableSection { return expectedValue; } - protected final Object resolveExpectedValue(RestTestExecutionContext executionContext) throws IOException { + protected final Object resolveExpectedValue(ClientYamlTestExecutionContext executionContext) throws IOException { if (expectedValue instanceof Map) { @SuppressWarnings("unchecked") Map map = (Map) expectedValue; @@ -57,7 +57,7 @@ public abstract class Assertion implements ExecutableSection { return expectedValue; } - protected final Object getActualValue(RestTestExecutionContext executionContext) throws IOException { + protected final Object getActualValue(ClientYamlTestExecutionContext executionContext) throws IOException { if (executionContext.stash().containsStashedValue(field)) { return executionContext.stash().getValue(field); 
} @@ -65,7 +65,7 @@ public abstract class Assertion implements ExecutableSection { } @Override - public final void execute(RestTestExecutionContext executionContext) throws IOException { + public final void execute(ClientYamlTestExecutionContext executionContext) throws IOException { doAssert(getActualValue(executionContext), resolveExpectedValue(executionContext)); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/TestSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSection.java similarity index 87% rename from test/framework/src/main/java/org/elasticsearch/test/rest/section/TestSection.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSection.java index 3f44e5ce767..4df126cba19 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/TestSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSection.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.section; +package org.elasticsearch.test.rest.yaml.section; import java.util.ArrayList; import java.util.List; @@ -24,12 +24,12 @@ import java.util.List; /** * Represents a test section, which is composed of a skip section and multiple executable sections. 
*/ -public class TestSection implements Comparable { +public class ClientYamlTestSection implements Comparable { private final String name; private SkipSection skipSection; private final List executableSections; - public TestSection(String name) { + public ClientYamlTestSection(String name) { this.name = name; this.executableSections = new ArrayList<>(); } @@ -59,7 +59,7 @@ public class TestSection implements Comparable { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - TestSection that = (TestSection) o; + ClientYamlTestSection that = (ClientYamlTestSection) o; if (name != null ? !name.equals(that.name) : that.name != null) return false; @@ -72,7 +72,7 @@ public class TestSection implements Comparable { } @Override - public int compareTo(TestSection o) { + public int compareTo(ClientYamlTestSection o) { return name.compareTo(o.getName()); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java similarity index 81% rename from test/framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java index 5c093be3fa0..af3e1be0528 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.section; +package org.elasticsearch.test.rest.yaml.section; import java.util.ArrayList; import java.util.List; @@ -27,7 +27,7 @@ import java.util.TreeSet; * Holds a REST test suite loaded from a specific yaml file. * Supports a setup section and multiple test sections. 
*/ -public class RestTestSuite { +public class ClientYamlTestSuite { private final String api; private final String name; @@ -35,9 +35,9 @@ public class RestTestSuite { private SetupSection setupSection; private TeardownSection teardownSection; - private Set testSections = new TreeSet<>(); + private Set testSections = new TreeSet<>(); - public RestTestSuite(String api, String name) { + public ClientYamlTestSuite(String api, String name) { this.api = api; this.name = name; } @@ -71,14 +71,14 @@ public class RestTestSuite { } /** - * Adds a {@link org.elasticsearch.test.rest.section.TestSection} to the REST suite + * Adds a {@link org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection} to the REST suite * @return true if the test section was not already present, false otherwise */ - public boolean addTestSection(TestSection testSection) { + public boolean addTestSection(ClientYamlTestSection testSection) { return this.testSections.add(testSection); } - public List getTestSections() { + public List getTestSections() { return new ArrayList<>(testSections); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java similarity index 88% rename from test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index 20b4f66f06c..78461130783 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -16,15 +16,15 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.section; +package org.elasticsearch.test.rest.yaml.section; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.test.rest.RestTestExecutionContext; -import org.elasticsearch.test.rest.client.RestTestResponse; -import org.elasticsearch.test.rest.client.RestTestResponseException; +import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; +import org.elasticsearch.test.rest.yaml.client.ClientYamlTestResponse; +import org.elasticsearch.test.rest.yaml.client.ClientYamlTestResponseException; import java.io.IOException; import java.util.HashMap; @@ -79,7 +79,7 @@ public class DoSection implements ExecutableSection { } @Override - public void execute(RestTestExecutionContext executionContext) throws IOException { + public void execute(ClientYamlTestExecutionContext executionContext) throws IOException { if ("param".equals(catchParam)) { //client should throw validation error before sending request @@ -89,7 +89,7 @@ public class DoSection implements ExecutableSection { } try { - RestTestResponse restTestResponse = executionContext.callApi(apiCallSection.getApi(), apiCallSection.getParams(), + ClientYamlTestResponse restTestResponse = executionContext.callApi(apiCallSection.getApi(), apiCallSection.getParams(), apiCallSection.getBodies(), apiCallSection.getHeaders()); if (Strings.hasLength(catchParam)) { String catchStatusCode; @@ -102,8 +102,8 @@ public class DoSection implements ExecutableSection { } fail(formatStatusCodeMessage(restTestResponse, catchStatusCode)); } - } catch(RestTestResponseException e) { - RestTestResponse restTestResponse = e.getRestTestResponse(); + } catch(ClientYamlTestResponseException e) { + ClientYamlTestResponse restTestResponse = e.getRestTestResponse(); if (!Strings.hasLength(catchParam)) { 
fail(formatStatusCodeMessage(restTestResponse, "2xx")); } else if (catches.containsKey(catchParam)) { @@ -124,13 +124,13 @@ public class DoSection implements ExecutableSection { } } - private void assertStatusCode(RestTestResponse restTestResponse) { + private void assertStatusCode(ClientYamlTestResponse restTestResponse) { Tuple> stringMatcherTuple = catches.get(catchParam); assertThat(formatStatusCodeMessage(restTestResponse, stringMatcherTuple.v1()), restTestResponse.getStatusCode(), stringMatcherTuple.v2()); } - private String formatStatusCodeMessage(RestTestResponse restTestResponse, String expected) { + private String formatStatusCodeMessage(ClientYamlTestResponse restTestResponse, String expected) { String api = apiCallSection.getApi(); if ("raw".equals(api)) { api += "[method=" + apiCallSection.getParams().get("method") + " path=" + apiCallSection.getParams().get("path") + "]"; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ExecutableSection.java similarity index 83% rename from test/framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ExecutableSection.java index 669d82cdd78..dbf8f4c1a4a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/ExecutableSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ExecutableSection.java @@ -16,9 +16,9 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.section; +package org.elasticsearch.test.rest.yaml.section; -import org.elasticsearch.test.rest.RestTestExecutionContext; +import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; import java.io.IOException; @@ -30,5 +30,5 @@ public interface ExecutableSection { /** * Executes the section passing in the execution context */ - void execute(RestTestExecutionContext executionContext) throws IOException; + void execute(ClientYamlTestExecutionContext executionContext) throws IOException; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java similarity index 97% rename from test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java index 63f69696653..9f2aa72ceda 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.section; +package org.elasticsearch.test.rest.yaml.section; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanEqualToAssertion.java similarity index 97% rename from test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanEqualToAssertion.java index 297eecf2d2a..5c1ff6c9008 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanEqualToAssertion.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.test.rest.section; +package org.elasticsearch.test.rest.yaml.section; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/IsFalseAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsFalseAssertion.java similarity index 97% rename from test/framework/src/main/java/org/elasticsearch/test/rest/section/IsFalseAssertion.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsFalseAssertion.java index 3d5e21e5146..3c7bc9f3ce9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/IsFalseAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsFalseAssertion.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.section; +package org.elasticsearch.test.rest.yaml.section; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/IsTrueAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsTrueAssertion.java similarity index 97% rename from test/framework/src/main/java/org/elasticsearch/test/rest/section/IsTrueAssertion.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsTrueAssertion.java index 1a899c3cc2b..2a6b9431ad5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/IsTrueAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsTrueAssertion.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.section; +package org.elasticsearch.test.rest.yaml.section; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LengthAssertion.java similarity index 98% rename from test/framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LengthAssertion.java index eb28ba01a94..396e6464774 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LengthAssertion.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.section; +package org.elasticsearch.test.rest.yaml.section; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanAssertion.java similarity index 97% rename from test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanAssertion.java index 153a7824569..0493da1f016 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanAssertion.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.section; +package org.elasticsearch.test.rest.yaml.section; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanOrEqualToAssertion.java similarity index 97% rename from test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanOrEqualToAssertion.java index 1eb3a9fc2b2..f46a357bfe0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanOrEqualToAssertion.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.test.rest.section; +package org.elasticsearch.test.rest.yaml.section; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java similarity index 99% rename from test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java index 3a96d4532a0..c0ec9ffca0b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.section; +package org.elasticsearch.test.rest.yaml.section; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.logging.ESLogger; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/SetSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/SetSection.java similarity index 87% rename from test/framework/src/main/java/org/elasticsearch/test/rest/section/SetSection.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/SetSection.java index 940664b4ee9..6372b7c9a0d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/SetSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/SetSection.java @@ -16,9 +16,9 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.section; +package org.elasticsearch.test.rest.yaml.section; -import org.elasticsearch.test.rest.RestTestExecutionContext; +import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; import java.io.IOException; import java.util.HashMap; @@ -43,7 +43,7 @@ public class SetSection implements ExecutableSection { } @Override - public void execute(RestTestExecutionContext executionContext) throws IOException { + public void execute(ClientYamlTestExecutionContext executionContext) throws IOException { for (Map.Entry entry : stash.entrySet()) { Object actualValue = executionContext.response(entry.getKey()); executionContext.stash().stashValue(entry.getValue(), actualValue); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/SetupSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/SetupSection.java similarity index 97% rename from test/framework/src/main/java/org/elasticsearch/test/rest/section/SetupSection.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/SetupSection.java index 45c66fbad4f..c2bcaa3ecd1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/SetupSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/SetupSection.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.section; +package org.elasticsearch.test.rest.yaml.section; import java.util.ArrayList; import java.util.List; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/SkipSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/SkipSection.java similarity index 97% rename from test/framework/src/main/java/org/elasticsearch/test/rest/section/SkipSection.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/SkipSection.java index 179d0a1e868..c9b6ead4aa8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/SkipSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/SkipSection.java @@ -16,11 +16,11 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.section; +package org.elasticsearch.test.rest.yaml.section; import org.elasticsearch.Version; import org.elasticsearch.test.VersionUtils; -import org.elasticsearch.test.rest.support.Features; +import org.elasticsearch.test.rest.yaml.support.Features; import java.util.ArrayList; import java.util.List; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/TeardownSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/TeardownSection.java similarity index 96% rename from test/framework/src/main/java/org/elasticsearch/test/rest/section/TeardownSection.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/TeardownSection.java index b3709472be5..1e49f8b5037 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/TeardownSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/TeardownSection.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.test.rest.section; +package org.elasticsearch.test.rest.yaml.section; import java.util.ArrayList; import java.util.List; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/support/Features.java similarity index 97% rename from test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/support/Features.java index 98bce54082f..8d7a5a58e16 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/support/Features.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.test.rest.support; +package org.elasticsearch.test.rest.yaml.support; import org.elasticsearch.test.ESIntegTestCase; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/support/FileUtils.java similarity index 99% rename from test/framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/yaml/support/FileUtils.java index b32308f8cd8..783f0f9dcc1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/support/FileUtils.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.support; +package org.elasticsearch.test.rest.yaml.support; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcherTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/BlacklistedPathPatternMatcherTests.java similarity index 97% rename from test/framework/src/test/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcherTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/yaml/BlacklistedPathPatternMatcherTests.java index 9414a2219cf..a58a70111c2 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/BlacklistedPathPatternMatcherTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/BlacklistedPathPatternMatcherTests.java @@ -16,10 +16,11 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest; +package org.elasticsearch.test.rest.yaml; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.yaml.BlacklistedPathPatternMatcher; public class BlacklistedPathPatternMatcherTests extends ESTestCase { diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/ObjectPathTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/ObjectPathTests.java similarity index 98% rename from test/framework/src/test/java/org/elasticsearch/test/rest/test/ObjectPathTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/yaml/ObjectPathTests.java index 1d99a73c767..5559fd3f385 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/ObjectPathTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/ObjectPathTests.java @@ -16,14 +16,14 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.test; +package org.elasticsearch.test.rest.yaml; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.rest.ObjectPath; -import org.elasticsearch.test.rest.Stash; +import org.elasticsearch.test.rest.yaml.ObjectPath; +import org.elasticsearch.test.rest.yaml.Stash; import java.io.IOException; import java.util.HashMap; diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/StashTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/StashTests.java similarity index 94% rename from test/framework/src/test/java/org/elasticsearch/test/rest/test/StashTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/yaml/StashTests.java index 7d0c0598f09..a8d32a316ac 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/StashTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/StashTests.java @@ -17,10 +17,10 @@ * under the License. 
*/ -package org.elasticsearch.test.rest.test; +package org.elasticsearch.test.rest.yaml; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.rest.Stash; +import org.elasticsearch.test.rest.yaml.Stash; import java.io.IOException; import java.util.HashMap; diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/AbstractParserTestCase.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/AbstractParserTestCase.java similarity index 96% rename from test/framework/src/test/java/org/elasticsearch/test/rest/test/AbstractParserTestCase.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/AbstractParserTestCase.java index 2a925dd2586..7d32389ddba 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/AbstractParserTestCase.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/AbstractParserTestCase.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.test.rest.test; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.ESTestCase; diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/AssertionParsersTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/AssertionParsersTests.java similarity index 78% rename from test/framework/src/test/java/org/elasticsearch/test/rest/test/AssertionParsersTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/AssertionParsersTests.java index 68b84b99639..4e382fb7b09 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/AssertionParsersTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/AssertionParsersTests.java @@ -16,22 +16,22 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.test; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.common.xcontent.yaml.YamlXContent; -import org.elasticsearch.test.rest.parser.GreaterThanParser; -import org.elasticsearch.test.rest.parser.IsFalseParser; -import org.elasticsearch.test.rest.parser.IsTrueParser; -import org.elasticsearch.test.rest.parser.LengthParser; -import org.elasticsearch.test.rest.parser.LessThanParser; -import org.elasticsearch.test.rest.parser.MatchParser; -import org.elasticsearch.test.rest.parser.RestTestSuiteParseContext; -import org.elasticsearch.test.rest.section.GreaterThanAssertion; -import org.elasticsearch.test.rest.section.IsFalseAssertion; -import org.elasticsearch.test.rest.section.IsTrueAssertion; -import org.elasticsearch.test.rest.section.LengthAssertion; -import org.elasticsearch.test.rest.section.LessThanAssertion; -import org.elasticsearch.test.rest.section.MatchAssertion; +import org.elasticsearch.test.rest.yaml.parser.GreaterThanParser; +import org.elasticsearch.test.rest.yaml.parser.IsFalseParser; +import org.elasticsearch.test.rest.yaml.parser.IsTrueParser; +import org.elasticsearch.test.rest.yaml.parser.LengthParser; +import org.elasticsearch.test.rest.yaml.parser.LessThanParser; +import org.elasticsearch.test.rest.yaml.parser.MatchParser; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestSuiteParseContext; +import org.elasticsearch.test.rest.yaml.section.GreaterThanAssertion; +import org.elasticsearch.test.rest.yaml.section.IsFalseAssertion; +import org.elasticsearch.test.rest.yaml.section.IsTrueAssertion; +import org.elasticsearch.test.rest.yaml.section.LengthAssertion; +import org.elasticsearch.test.rest.yaml.section.LessThanAssertion; +import org.elasticsearch.test.rest.yaml.section.MatchAssertion; import java.util.List; import java.util.Map; @@ -47,7 +47,7 @@ public class AssertionParsersTests extends AbstractParserTestCase { ); IsTrueParser isTrueParser = new IsTrueParser(); - 
IsTrueAssertion trueAssertion = isTrueParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + IsTrueAssertion trueAssertion = isTrueParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(trueAssertion, notNullValue()); assertThat(trueAssertion.getField(), equalTo("get.fields._timestamp")); @@ -59,7 +59,7 @@ public class AssertionParsersTests extends AbstractParserTestCase { ); IsFalseParser isFalseParser = new IsFalseParser(); - IsFalseAssertion falseAssertion = isFalseParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + IsFalseAssertion falseAssertion = isFalseParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(falseAssertion, notNullValue()); assertThat(falseAssertion.getField(), equalTo("docs.1._source")); @@ -71,7 +71,7 @@ public class AssertionParsersTests extends AbstractParserTestCase { ); GreaterThanParser greaterThanParser = new GreaterThanParser(); - GreaterThanAssertion greaterThanAssertion = greaterThanParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + GreaterThanAssertion greaterThanAssertion = greaterThanParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(greaterThanAssertion, notNullValue()); assertThat(greaterThanAssertion.getField(), equalTo("field")); assertThat(greaterThanAssertion.getExpectedValue(), instanceOf(Integer.class)); @@ -84,7 +84,7 @@ public class AssertionParsersTests extends AbstractParserTestCase { ); LessThanParser lessThanParser = new LessThanParser(); - LessThanAssertion lessThanAssertion = lessThanParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + LessThanAssertion lessThanAssertion = lessThanParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(lessThanAssertion, notNullValue()); assertThat(lessThanAssertion.getField(), equalTo("field")); assertThat(lessThanAssertion.getExpectedValue(), instanceOf(Integer.class)); @@ -97,7 
+97,7 @@ public class AssertionParsersTests extends AbstractParserTestCase { ); LengthParser lengthParser = new LengthParser(); - LengthAssertion lengthAssertion = lengthParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + LengthAssertion lengthAssertion = lengthParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(lengthAssertion, notNullValue()); assertThat(lengthAssertion.getField(), equalTo("_id")); assertThat(lengthAssertion.getExpectedValue(), instanceOf(Integer.class)); @@ -110,7 +110,7 @@ public class AssertionParsersTests extends AbstractParserTestCase { ); MatchParser matchParser = new MatchParser(); - MatchAssertion matchAssertion = matchParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + MatchAssertion matchAssertion = matchParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(matchAssertion, notNullValue()); assertThat(matchAssertion.getField(), equalTo("field")); @@ -124,7 +124,7 @@ public class AssertionParsersTests extends AbstractParserTestCase { ); MatchParser matchParser = new MatchParser(); - MatchAssertion matchAssertion = matchParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + MatchAssertion matchAssertion = matchParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(matchAssertion, notNullValue()); assertThat(matchAssertion.getField(), equalTo("foo")); @@ -138,7 +138,7 @@ public class AssertionParsersTests extends AbstractParserTestCase { ); MatchParser matchParser = new MatchParser(); - MatchAssertion matchAssertion = matchParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + MatchAssertion matchAssertion = matchParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(matchAssertion, notNullValue()); assertThat(matchAssertion.getField(), equalTo("matches")); @@ -156,7 +156,7 @@ public class AssertionParsersTests extends AbstractParserTestCase { ); 
MatchParser matchParser = new MatchParser(); - MatchAssertion matchAssertion = matchParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + MatchAssertion matchAssertion = matchParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(matchAssertion, notNullValue()); assertThat(matchAssertion.getField(), equalTo("_source")); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlSuiteTestParserTests.java similarity index 94% rename from test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlSuiteTestParserTests.java index 6b5cc3defb7..3b7313ec221 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/ClientYamlSuiteTestParserTests.java @@ -16,19 +16,19 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.test; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.Version; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.yaml.YamlXContent; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.rest.parser.RestTestParseException; -import org.elasticsearch.test.rest.parser.RestTestSuiteParseContext; -import org.elasticsearch.test.rest.parser.RestTestSuiteParser; -import org.elasticsearch.test.rest.section.DoSection; -import org.elasticsearch.test.rest.section.IsTrueAssertion; -import org.elasticsearch.test.rest.section.MatchAssertion; -import org.elasticsearch.test.rest.section.RestTestSuite; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestSuiteParseContext; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestSuiteParser; +import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSuite; +import org.elasticsearch.test.rest.yaml.section.DoSection; +import org.elasticsearch.test.rest.yaml.section.IsTrueAssertion; +import org.elasticsearch.test.rest.yaml.section.MatchAssertion; import org.junit.After; import java.util.Map; @@ -39,7 +39,7 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -public class RestTestParserTests extends ESTestCase { +public class ClientYamlSuiteTestParserTests extends ESTestCase { private XContentParser parser; @Override @@ -103,8 +103,8 @@ public class RestTestParserTests extends ESTestCase { " - match: {test_type.properties.text.analyzer: whitespace}\n" ); - RestTestSuiteParser testParser = new RestTestSuiteParser(); - RestTestSuite restTestSuite = testParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + ClientYamlTestSuiteParser testParser = new ClientYamlTestSuiteParser(); + ClientYamlTestSuite 
restTestSuite = testParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(restTestSuite, notNullValue()); assertThat(restTestSuite.getName(), equalTo("suite")); @@ -207,8 +207,8 @@ public class RestTestParserTests extends ESTestCase { " - match: { _source: { foo: bar }}" ); - RestTestSuiteParser testParser = new RestTestSuiteParser(); - RestTestSuite restTestSuite = testParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + ClientYamlTestSuiteParser testParser = new ClientYamlTestSuiteParser(); + ClientYamlTestSuite restTestSuite = testParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(restTestSuite, notNullValue()); assertThat(restTestSuite.getName(), equalTo("suite")); @@ -320,8 +320,8 @@ public class RestTestParserTests extends ESTestCase { " params: { bar: 'xxx' }\n" ); - RestTestSuiteParser testParser = new RestTestSuiteParser(); - RestTestSuite restTestSuite = testParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + ClientYamlTestSuiteParser testParser = new ClientYamlTestSuiteParser(); + ClientYamlTestSuite restTestSuite = testParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(restTestSuite, notNullValue()); assertThat(restTestSuite.getName(), equalTo("suite")); @@ -394,11 +394,11 @@ public class RestTestParserTests extends ESTestCase { "\n" ); - RestTestSuiteParser testParser = new RestTestSuiteParser(); + ClientYamlTestSuiteParser testParser = new ClientYamlTestSuiteParser(); try { - testParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + testParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); fail("Expected RestTestParseException"); - } catch (RestTestParseException e) { + } catch (ClientYamlTestParseException e) { assertThat(e.getMessage(), containsString("duplicate test section")); } } diff --git 
a/test/framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/DoSectionParserTests.java similarity index 89% rename from test/framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/DoSectionParserTests.java index 655d3071d63..5d79432155f 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/DoSectionParserTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/DoSectionParserTests.java @@ -16,16 +16,16 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.test; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.yaml.YamlXContent; -import org.elasticsearch.test.rest.parser.DoSectionParser; -import org.elasticsearch.test.rest.parser.RestTestParseException; -import org.elasticsearch.test.rest.parser.RestTestSuiteParseContext; -import org.elasticsearch.test.rest.section.ApiCallSection; -import org.elasticsearch.test.rest.section.DoSection; +import org.elasticsearch.test.rest.yaml.parser.DoSectionParser; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestSuiteParseContext; +import org.elasticsearch.test.rest.yaml.section.ApiCallSection; +import org.elasticsearch.test.rest.yaml.section.DoSection; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -46,7 +46,7 @@ public class DoSectionParserTests extends AbstractParserTestCase { ); DoSectionParser doSectionParser = new DoSectionParser(); - DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + 
DoSection doSection = doSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); ApiCallSection apiCallSection = doSection.getApiCallSection(); assertThat(apiCallSection, notNullValue()); @@ -64,7 +64,7 @@ public class DoSectionParserTests extends AbstractParserTestCase { ); DoSectionParser doSectionParser = new DoSectionParser(); - DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + DoSection doSection = doSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); ApiCallSection apiCallSection = doSection.getApiCallSection(); assertThat(apiCallSection, notNullValue()); @@ -84,7 +84,7 @@ public class DoSectionParserTests extends AbstractParserTestCase { ); DoSectionParser doSectionParser = new DoSectionParser(); - DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + DoSection doSection = doSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); ApiCallSection apiCallSection = doSection.getApiCallSection(); assertThat(apiCallSection, notNullValue()); @@ -116,7 +116,7 @@ public class DoSectionParserTests extends AbstractParserTestCase { ); DoSectionParser doSectionParser = new DoSectionParser(); - DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + DoSection doSection = doSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); ApiCallSection apiCallSection = doSection.getApiCallSection(); assertThat(apiCallSection, notNullValue()); @@ -142,7 +142,7 @@ public class DoSectionParserTests extends AbstractParserTestCase { ); DoSectionParser doSectionParser = new DoSectionParser(); - DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + DoSection doSection = doSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); ApiCallSection apiCallSection = 
doSection.getApiCallSection(); assertThat(apiCallSection, notNullValue()); @@ -166,7 +166,7 @@ public class DoSectionParserTests extends AbstractParserTestCase { String body = "{ \"_source\": [ \"include.field1\", \"include.field2\" ], \"query\": { \"match_all\": {} }}"; DoSectionParser doSectionParser = new DoSectionParser(); - DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + DoSection doSection = doSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); ApiCallSection apiCallSection = doSection.getApiCallSection(); assertThat(apiCallSection, notNullValue()); @@ -202,7 +202,7 @@ public class DoSectionParserTests extends AbstractParserTestCase { bodies[3] = "{ \"f1\":\"v2\", \"f2\": 47 }"; DoSectionParser doSectionParser = new DoSectionParser(); - DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + DoSection doSection = doSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); ApiCallSection apiCallSection = doSection.getApiCallSection(); assertThat(apiCallSection, notNullValue()); @@ -235,7 +235,7 @@ public class DoSectionParserTests extends AbstractParserTestCase { bodies[1] = "{ \"f1\":\"v1\", \"f2\": 42 }"; DoSectionParser doSectionParser = new DoSectionParser(); - DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + DoSection doSection = doSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); ApiCallSection apiCallSection = doSection.getApiCallSection(); assertThat(apiCallSection, notNullValue()); @@ -264,7 +264,7 @@ public class DoSectionParserTests extends AbstractParserTestCase { "]}"; DoSectionParser doSectionParser = new DoSectionParser(); - DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + DoSection doSection = doSectionParser.parse(new ClientYamlTestSuiteParseContext("api", 
"suite", parser)); ApiCallSection apiCallSection = doSection.getApiCallSection(); assertThat(apiCallSection, notNullValue()); @@ -285,7 +285,7 @@ public class DoSectionParserTests extends AbstractParserTestCase { ); DoSectionParser doSectionParser = new DoSectionParser(); - DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + DoSection doSection = doSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); ApiCallSection apiCallSection = doSection.getApiCallSection(); assertThat(apiCallSection, notNullValue()); @@ -311,7 +311,7 @@ public class DoSectionParserTests extends AbstractParserTestCase { String body = "{ \"size\": 100, \"query\": { \"match_all\": {} } }"; DoSectionParser doSectionParser = new DoSectionParser(); - DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + DoSection doSection = doSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); ApiCallSection apiCallSection = doSection.getApiCallSection(); assertThat(apiCallSection.getApi(), equalTo("index")); @@ -332,7 +332,7 @@ public class DoSectionParserTests extends AbstractParserTestCase { ); DoSectionParser doSectionParser = new DoSectionParser(); - DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + DoSection doSection = doSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(doSection.getCatch(), equalTo("missing")); assertThat(doSection.getApiCallSection(), notNullValue()); @@ -352,7 +352,7 @@ public class DoSectionParserTests extends AbstractParserTestCase { ); DoSectionParser doSectionParser = new DoSectionParser(); - DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + DoSection doSection = doSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(doSection.getApiCallSection(), 
notNullValue()); assertThat(doSection.getApiCallSection().getApi(), equalTo("indices.get_warmer")); @@ -371,9 +371,9 @@ public class DoSectionParserTests extends AbstractParserTestCase { DoSectionParser doSectionParser = new DoSectionParser(); try { - doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + doSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); fail("Expected RestTestParseException"); - } catch (RestTestParseException e) { + } catch (ClientYamlTestParseException e) { assertThat(e.getMessage(), is("client call section is mandatory within a do section")); } } @@ -387,7 +387,7 @@ public class DoSectionParserTests extends AbstractParserTestCase { ); DoSectionParser doSectionParser = new DoSectionParser(); - DoSection doSection = doSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + DoSection doSection = doSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(doSection.getCatch(), nullValue()); assertThat(doSection.getApiCallSection(), notNullValue()); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/SetSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/SetSectionParserTests.java similarity index 78% rename from test/framework/src/test/java/org/elasticsearch/test/rest/test/SetSectionParserTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/SetSectionParserTests.java index c2b66375664..386cdd6deb4 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/SetSectionParserTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/SetSectionParserTests.java @@ -16,13 +16,13 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.test; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.common.xcontent.yaml.YamlXContent; -import org.elasticsearch.test.rest.parser.RestTestParseException; -import org.elasticsearch.test.rest.parser.RestTestSuiteParseContext; -import org.elasticsearch.test.rest.parser.SetSectionParser; -import org.elasticsearch.test.rest.section.SetSection; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestSuiteParseContext; +import org.elasticsearch.test.rest.yaml.parser.SetSectionParser; +import org.elasticsearch.test.rest.yaml.section.SetSection; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -36,7 +36,7 @@ public class SetSectionParserTests extends AbstractParserTestCase { SetSectionParser setSectionParser = new SetSectionParser(); - SetSection setSection = setSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + SetSection setSection = setSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(setSection, notNullValue()); assertThat(setSection.getStash(), notNullValue()); @@ -51,7 +51,7 @@ public class SetSectionParserTests extends AbstractParserTestCase { SetSectionParser setSectionParser = new SetSectionParser(); - SetSection setSection = setSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + SetSection setSection = setSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(setSection, notNullValue()); assertThat(setSection.getStash(), notNullValue()); @@ -68,9 +68,9 @@ public class SetSectionParserTests extends AbstractParserTestCase { SetSectionParser setSectionParser = new SetSectionParser(); try { - setSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + setSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", 
parser)); fail("Expected RestTestParseException"); - } catch (RestTestParseException e) { + } catch (ClientYamlTestParseException e) { assertThat(e.getMessage(), is("set section must set at least a value")); } } diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/SetupSectionParserTests.java similarity index 92% rename from test/framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/SetupSectionParserTests.java index b3fe1f0f23b..fa7c34eecde 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/SetupSectionParserTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/SetupSectionParserTests.java @@ -16,13 +16,13 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.test; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.Version; import org.elasticsearch.common.xcontent.yaml.YamlXContent; -import org.elasticsearch.test.rest.parser.RestTestSuiteParseContext; -import org.elasticsearch.test.rest.parser.SetupSectionParser; -import org.elasticsearch.test.rest.section.SetupSection; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestSuiteParseContext; +import org.elasticsearch.test.rest.yaml.parser.SetupSectionParser; +import org.elasticsearch.test.rest.yaml.section.SetupSection; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -45,7 +45,7 @@ public class SetupSectionParserTests extends AbstractParserTestCase { ); SetupSectionParser setupSectionParser = new SetupSectionParser(); - SetupSection setupSection = setupSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + SetupSection setupSection = 
setupSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(setupSection, notNullValue()); assertThat(setupSection.getSkipSection().isEmpty(), equalTo(true)); @@ -74,7 +74,7 @@ public class SetupSectionParserTests extends AbstractParserTestCase { ); SetupSectionParser setupSectionParser = new SetupSectionParser(); - SetupSection setupSection = setupSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + SetupSection setupSection = setupSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(setupSection, notNullValue()); assertThat(setupSection.getSkipSection().isEmpty(), equalTo(false)); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/SkipSectionParserTests.java similarity index 80% rename from test/framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/SkipSectionParserTests.java index 39b0f284b5e..f5d46cdd3d6 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/SkipSectionParserTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/SkipSectionParserTests.java @@ -16,15 +16,15 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.test; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.Version; import org.elasticsearch.common.xcontent.yaml.YamlXContent; import org.elasticsearch.test.VersionUtils; -import org.elasticsearch.test.rest.parser.RestTestParseException; -import org.elasticsearch.test.rest.parser.RestTestSuiteParseContext; -import org.elasticsearch.test.rest.parser.SkipSectionParser; -import org.elasticsearch.test.rest.section.SkipSection; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestSuiteParseContext; +import org.elasticsearch.test.rest.yaml.parser.SkipSectionParser; +import org.elasticsearch.test.rest.yaml.section.SkipSection; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -40,7 +40,7 @@ public class SkipSectionParserTests extends AbstractParserTestCase { SkipSectionParser skipSectionParser = new SkipSectionParser(); - SkipSection skipSection = skipSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + SkipSection skipSection = skipSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(skipSection, notNullValue()); assertThat(skipSection.getLowerVersion(), equalTo(VersionUtils.getFirstVersion())); @@ -57,7 +57,7 @@ public class SkipSectionParserTests extends AbstractParserTestCase { SkipSectionParser skipSectionParser = new SkipSectionParser(); - SkipSection skipSection = skipSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + SkipSection skipSection = skipSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(skipSection, notNullValue()); assertThat(skipSection.getLowerVersion(), equalTo(VersionUtils.getFirstVersion())); @@ -73,7 +73,7 @@ public class SkipSectionParserTests extends AbstractParserTestCase { SkipSectionParser skipSectionParser = new 
SkipSectionParser(); - SkipSection skipSection = skipSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + SkipSection skipSection = skipSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(skipSection, notNullValue()); assertThat(skipSection.isVersionCheck(), equalTo(false)); @@ -89,7 +89,7 @@ public class SkipSectionParserTests extends AbstractParserTestCase { SkipSectionParser skipSectionParser = new SkipSectionParser(); - SkipSection skipSection = skipSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + SkipSection skipSection = skipSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(skipSection, notNullValue()); assertThat(skipSection.isVersionCheck(), equalTo(false)); @@ -110,9 +110,9 @@ public class SkipSectionParserTests extends AbstractParserTestCase { SkipSectionParser skipSectionParser = new SkipSectionParser(); try { - skipSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + skipSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); fail("Expected RestTestParseException"); - } catch (RestTestParseException e) { + } catch (ClientYamlTestParseException e) { assertThat(e.getMessage(), is("version or features are mutually exclusive")); } } @@ -124,9 +124,9 @@ public class SkipSectionParserTests extends AbstractParserTestCase { SkipSectionParser skipSectionParser = new SkipSectionParser(); try { - skipSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + skipSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); fail("Expected RestTestParseException"); - } catch (RestTestParseException e) { + } catch (ClientYamlTestParseException e) { assertThat(e.getMessage(), is("reason is mandatory within skip version section")); } } @@ -138,9 +138,9 @@ public class SkipSectionParserTests extends AbstractParserTestCase { SkipSectionParser 
skipSectionParser = new SkipSectionParser(); try { - skipSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + skipSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); fail("Expected RestTestParseException"); - } catch (RestTestParseException e) { + } catch (ClientYamlTestParseException e) { assertThat(e.getMessage(), is("version or features is mandatory within skip section")); } } diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/TeardownSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/TeardownSectionParserTests.java similarity index 90% rename from test/framework/src/test/java/org/elasticsearch/test/rest/test/TeardownSectionParserTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/TeardownSectionParserTests.java index eeccea5f5e5..4ad47e35e48 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/TeardownSectionParserTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/TeardownSectionParserTests.java @@ -17,13 +17,13 @@ * under the License. 
*/ -package org.elasticsearch.test.rest.test; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.Version; import org.elasticsearch.common.xcontent.yaml.YamlXContent; -import org.elasticsearch.test.rest.parser.RestTestSuiteParseContext; -import org.elasticsearch.test.rest.parser.TeardownSectionParser; -import org.elasticsearch.test.rest.section.TeardownSection; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestSuiteParseContext; +import org.elasticsearch.test.rest.yaml.parser.TeardownSectionParser; +import org.elasticsearch.test.rest.yaml.section.TeardownSection; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -50,7 +50,7 @@ public class TeardownSectionParserTests extends AbstractParserTestCase { ); TeardownSectionParser teardownSectionParser = new TeardownSectionParser(); - TeardownSection section = teardownSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + TeardownSection section = teardownSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(section, notNullValue()); assertThat(section.getSkipSection().isEmpty(), equalTo(true)); @@ -79,7 +79,7 @@ public class TeardownSectionParserTests extends AbstractParserTestCase { ); TeardownSectionParser teardownSectionParser = new TeardownSectionParser(); - TeardownSection section = teardownSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + TeardownSection section = teardownSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(section, notNullValue()); assertThat(section.getSkipSection().isEmpty(), equalTo(false)); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/TestSectionParserTests.java similarity index 85% rename from 
test/framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/TestSectionParserTests.java index d034ae56a71..aa23cabe07b 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/TestSectionParserTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/parser/TestSectionParserTests.java @@ -16,22 +16,22 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.test; +package org.elasticsearch.test.rest.yaml.parser; import org.elasticsearch.Version; import org.elasticsearch.common.xcontent.yaml.YamlXContent; -import org.elasticsearch.test.rest.parser.RestTestSectionParser; -import org.elasticsearch.test.rest.parser.RestTestSuiteParseContext; -import org.elasticsearch.test.rest.section.DoSection; -import org.elasticsearch.test.rest.section.GreaterThanAssertion; -import org.elasticsearch.test.rest.section.IsFalseAssertion; -import org.elasticsearch.test.rest.section.IsTrueAssertion; -import org.elasticsearch.test.rest.section.LengthAssertion; -import org.elasticsearch.test.rest.section.LessThanAssertion; -import org.elasticsearch.test.rest.section.MatchAssertion; -import org.elasticsearch.test.rest.section.SetSection; -import org.elasticsearch.test.rest.section.SkipSection; -import org.elasticsearch.test.rest.section.TestSection; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestSectionParser; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestSuiteParseContext; +import org.elasticsearch.test.rest.yaml.section.DoSection; +import org.elasticsearch.test.rest.yaml.section.GreaterThanAssertion; +import org.elasticsearch.test.rest.yaml.section.IsFalseAssertion; +import org.elasticsearch.test.rest.yaml.section.IsTrueAssertion; +import org.elasticsearch.test.rest.yaml.section.LengthAssertion; +import 
org.elasticsearch.test.rest.yaml.section.LessThanAssertion; +import org.elasticsearch.test.rest.yaml.section.MatchAssertion; +import org.elasticsearch.test.rest.yaml.section.SetSection; +import org.elasticsearch.test.rest.yaml.section.SkipSection; +import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection; import java.util.Map; @@ -51,8 +51,8 @@ public class TestSectionParserTests extends AbstractParserTestCase { " name: test_warmer" ); - RestTestSectionParser testSectionParser = new RestTestSectionParser(); - TestSection testSection = testSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + ClientYamlTestSectionParser testSectionParser = new ClientYamlTestSectionParser(); + ClientYamlTestSection testSection = testSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(testSection, notNullValue()); assertThat(testSection.getName(), equalTo("First test section")); @@ -80,9 +80,9 @@ public class TestSectionParserTests extends AbstractParserTestCase { " - set: {_scroll_id: scroll_id}"; - RestTestSectionParser testSectionParser = new RestTestSectionParser(); + ClientYamlTestSectionParser testSectionParser = new ClientYamlTestSectionParser(); parser = YamlXContent.yamlXContent.createParser(yaml); - TestSection testSection = testSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + ClientYamlTestSection testSection = testSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(testSection, notNullValue()); assertThat(testSection.getName(), equalTo("First test section")); @@ -119,8 +119,8 @@ public class TestSectionParserTests extends AbstractParserTestCase { " id: 中文" ); - RestTestSectionParser testSectionParser = new RestTestSectionParser(); - TestSection testSection = testSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + ClientYamlTestSectionParser testSectionParser = new ClientYamlTestSectionParser(); + 
ClientYamlTestSection testSection = testSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(testSection, notNullValue()); assertThat(testSection.getName(), equalTo("Basic")); @@ -172,8 +172,8 @@ public class TestSectionParserTests extends AbstractParserTestCase { " - lt: { size: 10 }" ); - RestTestSectionParser testSectionParser = new RestTestSectionParser(); - TestSection testSection = testSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + ClientYamlTestSectionParser testSectionParser = new ClientYamlTestSectionParser(); + ClientYamlTestSection testSection = testSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(testSection, notNullValue()); assertThat(testSection.getName(), equalTo("Basic")); @@ -242,8 +242,8 @@ public class TestSectionParserTests extends AbstractParserTestCase { " \n" + " - is_true: nodes\n" + " - is_true: cluster_name\n"); - RestTestSectionParser testSectionParser = new RestTestSectionParser(); - TestSection testSection = testSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + ClientYamlTestSectionParser testSectionParser = new ClientYamlTestSectionParser(); + ClientYamlTestSection testSection = testSectionParser.parse(new ClientYamlTestSuiteParseContext("api", "suite", parser)); assertThat(testSection, notNullValue()); assertThat(testSection.getName(), equalTo("node_info test")); assertThat(testSection.getExecutableSections().size(), equalTo(3)); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserFailingTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiParserFailingTests.java similarity index 89% rename from test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserFailingTests.java rename to 
test/framework/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiParserFailingTests.java index 0cd8ee31398..f2219816462 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserFailingTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiParserFailingTests.java @@ -16,21 +16,22 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.test; +package org.elasticsearch.test.rest.yaml.restspec; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.rest.spec.RestApiParser; +import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApiParser; import java.io.IOException; import static org.hamcrest.Matchers.containsString; /** - * These tests are not part of {@link RestApiParserTests} because the tested failures don't allow to consume the whole yaml stream + * These tests are not part of {@link ClientYamlSuiteRestApiParserTests} because the tested failures don't allow to consume the whole yaml + * stream */ -public class RestApiParserFailingTests extends ESTestCase { +public class ClientYamlSuiteRestApiParserFailingTests extends ESTestCase { public void testBrokenSpecShouldThrowUsefulExceptionWhenParsingFailsOnParams() throws Exception { parseAndExpectFailure(BROKEN_SPEC_PARAMS, "Expected params field in rest api definition to contain an object"); } @@ -42,7 +43,7 @@ public class RestApiParserFailingTests extends ESTestCase { private void parseAndExpectFailure(String brokenJson, String expectedErrorMessage) throws Exception { XContentParser parser = JsonXContent.jsonXContent.createParser(brokenJson); try { - new RestApiParser().parse("location", parser); + new ClientYamlSuiteRestApiParser().parse("location", parser); fail("Expected to fail parsing but 
did not happen"); } catch (IOException e) { assertThat(e.getMessage(), containsString(expectedErrorMessage)); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiParserTests.java similarity index 93% rename from test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiParserTests.java index d884b327f71..342f2bb4ed2 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiParserTests.java @@ -16,20 +16,21 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.test; +package org.elasticsearch.test.rest.yaml.restspec; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.test.rest.spec.RestApi; -import org.elasticsearch.test.rest.spec.RestApiParser; +import org.elasticsearch.test.rest.yaml.parser.AbstractParserTestCase; +import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; +import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApiParser; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; -public class RestApiParserTests extends AbstractParserTestCase { +public class ClientYamlSuiteRestApiParserTests extends AbstractParserTestCase { public void testParseRestSpecIndexApi() throws Exception { parser = JsonXContent.jsonXContent.createParser(REST_SPEC_INDEX_API); - RestApi restApi = new RestApiParser().parse("location", parser); + ClientYamlSuiteRestApi restApi = new ClientYamlSuiteRestApiParser().parse("location", parser); assertThat(restApi, 
notNullValue()); assertThat(restApi.getName(), equalTo("index")); @@ -51,7 +52,7 @@ public class RestApiParserTests extends AbstractParserTestCase { public void testParseRestSpecGetTemplateApi() throws Exception { parser = JsonXContent.jsonXContent.createParser(REST_SPEC_GET_TEMPLATE_API); - RestApi restApi = new RestApiParser().parse("location", parser); + ClientYamlSuiteRestApi restApi = new ClientYamlSuiteRestApiParser().parse("location", parser); assertThat(restApi, notNullValue()); assertThat(restApi.getName(), equalTo("indices.get_template")); assertThat(restApi.getMethods().size(), equalTo(1)); @@ -68,7 +69,7 @@ public class RestApiParserTests extends AbstractParserTestCase { public void testParseRestSpecCountApi() throws Exception { parser = JsonXContent.jsonXContent.createParser(REST_SPEC_COUNT_API); - RestApi restApi = new RestApiParser().parse("location", parser); + ClientYamlSuiteRestApi restApi = new ClientYamlSuiteRestApiParser().parse("location", parser); assertThat(restApi, notNullValue()); assertThat(restApi.getName(), equalTo("count")); assertThat(restApi.getMethods().size(), equalTo(2)); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/support/FileUtilsTests.java similarity index 98% rename from test/framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/yaml/support/FileUtilsTests.java index db41c42e16a..c02ca5d85d0 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/support/FileUtilsTests.java @@ -16,10 +16,10 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.test; +package org.elasticsearch.test.rest.yaml.support; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.rest.support.FileUtils; +import org.elasticsearch.test.rest.yaml.support.FileUtils; import java.nio.file.Files; import java.nio.file.Path; From 643ccb8cc189186db993108c51da6b49dc50540a Mon Sep 17 00:00:00 2001 From: kingrhoton Date: Tue, 26 Jul 2016 10:59:33 -0700 Subject: [PATCH 84/93] [docs] Switch contraction to possessive --- docs/reference/modules/discovery/zen.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/modules/discovery/zen.asciidoc b/docs/reference/modules/discovery/zen.asciidoc index 60823393d11..f51fecb9079 100644 --- a/docs/reference/modules/discovery/zen.asciidoc +++ b/docs/reference/modules/discovery/zen.asciidoc @@ -64,7 +64,7 @@ Nodes can be excluded from becoming a master by setting `node.master` to `false` The `discovery.zen.minimum_master_nodes` sets the minimum number of master eligible nodes that need to join a newly elected master in order for an election to -complete and for the elected node to accept it's mastership. The same setting controls the minimum number of +complete and for the elected node to accept its mastership. The same setting controls the minimum number of active master eligible nodes that should be a part of any active cluster. If this requirement is not met the active master node will step down and a new master election will be begin. From 7275291f354aceb4fa18b0565345a9e0ff9e6ce1 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Tue, 26 Jul 2016 15:08:19 -0400 Subject: [PATCH 85/93] Tests: add more logging to testCorruptFileThenSnapshotAndRestore This test fails because of an unknown exception in the FsService.stats() method, which causes no stats to be returned. With this change the exception that is causing this issue is going to be logged.
Related to #19591 and #17964 --- .../java/org/elasticsearch/index/store/CorruptedFileIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index a8f8a9f802d..7813c8402f0 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -70,6 +70,7 @@ import org.elasticsearch.test.CorruptionUtils; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.MockIndexEventListener; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.store.MockFSIndexStore; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.TransportException; @@ -471,6 +472,7 @@ public class CorruptedFileIT extends ESIntegTestCase { * TODO once checksum verification on snapshotting is implemented this test needs to be fixed or split into several * parts... We should also corrupt files on the actual snapshot and check that we don't restore the corrupted shard. */ + @TestLogging("monitor.fs:DEBUG") public void testCorruptFileThenSnapshotAndRestore() throws ExecutionException, InterruptedException, IOException { int numDocs = scaledRandomIntBetween(100, 1000); internalCluster().ensureAtLeastNumDataNodes(2); From 0876247bca74ab041386ab5a70a70853b2f1b160 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Tue, 26 Jul 2016 14:14:04 -0600 Subject: [PATCH 86/93] [TEST] Assert that shard has been released before running truncate tool It's possible that the shard has been closed but the resources associated with it have not yet been released. This waits until the index lock can be obtained before running the tool. 
--- .../index/translog/TruncateTranslogIT.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java b/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java index f6a28169898..fa8a87cf382 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java @@ -22,6 +22,13 @@ package org.elasticsearch.index.translog; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import joptsimple.OptionParser; import joptsimple.OptionSet; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FSDirectory; +import org.apache.lucene.store.Lock; +import org.apache.lucene.store.LockObtainFailedException; +import org.apache.lucene.store.NativeFSLockFactory; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; @@ -139,6 +146,19 @@ public class TruncateTranslogIT extends ESIntegTestCase { client().admin().indices().prepareClose("test").get(); for (Path translogDir : translogDirs) { + final Path idxLocation = translogDir.getParent().resolve("index"); + assertBusy(() -> { + logger.info("--> checking that lock has been released for {}", idxLocation); + try (Directory dir = FSDirectory.open(idxLocation, NativeFSLockFactory.INSTANCE); + Lock writeLock = dir.obtainLock(IndexWriter.WRITE_LOCK_NAME)) { + // Great, do nothing, we just wanted to obtain the lock + } catch (LockObtainFailedException lofe) { + throw new ElasticsearchException("Still waiting for lock release at [" + idxLocation + "]"); + } catch (IOException ioe) { + fail("Got an IOException: " + ioe); + } + }); + OptionSet options = 
parser.parse("-d", translogDir.toAbsolutePath().toString(), "-b"); logger.info("--> running truncate translog command for [{}]", translogDir.toAbsolutePath()); ttc.execute(t, options, new HashMap()); From 6f76740a5859290cfa7e4fd4b05b5854f2051aa6 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 26 Jul 2016 23:42:10 +0200 Subject: [PATCH 87/93] await fix testConcurrentSendRespondAndDisconnect --- .../transport/AbstractSimpleTransportTestCase.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 73855c13673..eb4dbb8bca5 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -490,6 +490,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } @TestLogging("transport:DEBUG,transport.tracer:TRACE") + // boaz is on this + @AwaitsFix(bugUrl = "https://elasticsearch-ci.elastic.co/job/elastic+elasticsearch+master+multijob-os-compatibility/os=oraclelinux/835") public void testConcurrentSendRespondAndDisconnect() throws BrokenBarrierException, InterruptedException { Set sendingErrors = ConcurrentCollections.newConcurrentSet(); Set responseErrors = ConcurrentCollections.newConcurrentSet(); From 21ff90fed33fc4ce9b1d16e678fc6a2f82dabcc2 Mon Sep 17 00:00:00 2001 From: Ali Beyad Date: Tue, 26 Jul 2016 19:17:02 -0400 Subject: [PATCH 88/93] Fixes debug logging on index creation waiting for shards to be started (#19612) --- .../cluster/metadata/MetaDataCreateIndexService.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java 
b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index bab03febaae..3e5e0e92237 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -202,8 +202,10 @@ public class MetaDataCreateIndexService extends AbstractComponent { if (response.isAcknowledged()) { activeShardsObserver.waitForActiveShards(request.index(), request.waitForActiveShards(), request.ackTimeout(), shardsAcked -> { - logger.debug("[{}] index created, but the operation timed out while waiting for " + - "enough shards to be started.", request.index()); + if (shardsAcked == false) { + logger.debug("[{}] index created, but the operation timed out while waiting for " + + "enough shards to be started.", request.index()); + } listener.onResponse(new CreateIndexClusterStateUpdateResponse(response.isAcknowledged(), shardsAcked)); }, listener::onFailure); } else { From 24d7fa6d54339b2ab9459558e507ec958f980ca8 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 26 Jul 2016 20:47:14 +0200 Subject: [PATCH 89/93] ingest: Change the `foreach` processor to use the `_ingest._value` ingest metadata attribute to store the current array element being processed. 
Closes #19592 --- .../ingest/WriteableIngestDocument.java | 11 +- .../ingest/CompoundProcessor.java | 4 +- .../elasticsearch/ingest/IngestDocument.java | 6 +- .../ingest/SimulateExecutionServiceTests.java | 2 +- .../ingest/TrackingResultProcessorTests.java | 2 +- .../ingest/WriteableIngestDocumentTests.java | 8 +- .../ingest/CompoundProcessorTests.java | 12 +- .../ingest/IngestDocumentTests.java | 6 +- docs/reference/ingest/ingest-node.asciidoc | 20 ++-- .../ingest/common/ForEachProcessor.java | 11 +- .../ingest/common/ForEachProcessorTests.java | 107 ++++++++++++++++-- .../rest-api-spec/test/ingest/80_foreach.yaml | 2 +- .../10_pipeline_with_mustache_templates.yaml | 2 +- .../test/ingest/20_combine_processors.yaml | 4 +- 14 files changed, 143 insertions(+), 54 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java b/core/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java index 137914701db..84c41ae689b 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java @@ -41,15 +41,14 @@ final class WriteableIngestDocument implements Writeable, ToXContent { WriteableIngestDocument(StreamInput in) throws IOException { Map sourceAndMetadata = in.readMap(); - @SuppressWarnings("unchecked") - Map ingestMetadata = (Map) in.readGenericValue(); + Map ingestMetadata = in.readMap(); this.ingestDocument = new IngestDocument(sourceAndMetadata, ingestMetadata); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeMap(ingestDocument.getSourceAndMetadata()); - out.writeGenericValue(ingestDocument.getIngestMetadata()); + out.writeMap(ingestDocument.getIngestMetadata()); } IngestDocument getIngestDocument() { @@ -66,11 +65,7 @@ final class WriteableIngestDocument implements Writeable, ToXContent { } } builder.field("_source", ingestDocument.getSourceAndMetadata()); - 
builder.startObject("_ingest"); - for (Map.Entry ingestMetadata : ingestDocument.getIngestMetadata().entrySet()) { - builder.field(ingestMetadata.getKey(), ingestMetadata.getValue()); - } - builder.endObject(); + builder.field("_ingest", ingestDocument.getIngestMetadata()); builder.endObject(); return builder; } diff --git a/core/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java b/core/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java index 501e8c1b2f9..3ab7c078cd7 100644 --- a/core/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java +++ b/core/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java @@ -135,14 +135,14 @@ public class CompoundProcessor implements Processor { List processorTagHeader = cause.getHeader("processor_tag"); String failedProcessorType = (processorTypeHeader != null) ? processorTypeHeader.get(0) : null; String failedProcessorTag = (processorTagHeader != null) ? processorTagHeader.get(0) : null; - Map ingestMetadata = ingestDocument.getIngestMetadata(); + Map ingestMetadata = ingestDocument.getIngestMetadata(); ingestMetadata.put(ON_FAILURE_MESSAGE_FIELD, cause.getRootCause().getMessage()); ingestMetadata.put(ON_FAILURE_PROCESSOR_TYPE_FIELD, failedProcessorType); ingestMetadata.put(ON_FAILURE_PROCESSOR_TAG_FIELD, failedProcessorTag); } private void removeFailureMetadata(IngestDocument ingestDocument) { - Map ingestMetadata = ingestDocument.getIngestMetadata(); + Map ingestMetadata = ingestDocument.getIngestMetadata(); ingestMetadata.remove(ON_FAILURE_MESSAGE_FIELD); ingestMetadata.remove(ON_FAILURE_PROCESSOR_TYPE_FIELD); ingestMetadata.remove(ON_FAILURE_PROCESSOR_TAG_FIELD); diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java index baa0f3acd0d..86eaf7d179f 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -54,7 
+54,7 @@ public final class IngestDocument { static final String TIMESTAMP = "timestamp"; private final Map sourceAndMetadata; - private final Map ingestMetadata; + private final Map ingestMetadata; public IngestDocument(String index, String type, String id, String routing, String parent, String timestamp, String ttl, Map source) { @@ -94,7 +94,7 @@ public final class IngestDocument { * source and ingest metadata. This is needed because the ingest metadata will be initialized with the current timestamp at * init time, which makes equality comparisons impossible in tests. */ - public IngestDocument(Map sourceAndMetadata, Map ingestMetadata) { + public IngestDocument(Map sourceAndMetadata, Map ingestMetadata) { this.sourceAndMetadata = sourceAndMetadata; this.ingestMetadata = ingestMetadata; } @@ -517,7 +517,7 @@ public final class IngestDocument { * Returns the available ingest metadata fields, by default only timestamp, but it is possible to set additional ones. * Use only for reading values, modify them instead using {@link #setFieldValue(String, Object)} and {@link #removeField(String)} */ - public Map getIngestMetadata() { + public Map getIngestMetadata() { return this.ingestMetadata; } diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java index 5b3551b24d1..8cf05509813 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java @@ -145,7 +145,7 @@ public class SimulateExecutionServiceTests extends ESTestCase { assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument(), not(sameInstance(ingestDocument))); IngestDocument ingestDocumentWithOnFailureMetadata = new IngestDocument(ingestDocument); - Map metadata = ingestDocumentWithOnFailureMetadata.getIngestMetadata(); + Map 
metadata = ingestDocumentWithOnFailureMetadata.getIngestMetadata(); metadata.put(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD, "mock"); metadata.put(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD, "processor_0"); metadata.put(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD, "processor failed"); diff --git a/core/src/test/java/org/elasticsearch/action/ingest/TrackingResultProcessorTests.java b/core/src/test/java/org/elasticsearch/action/ingest/TrackingResultProcessorTests.java index 999cbe435f2..3572a04529b 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/TrackingResultProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/TrackingResultProcessorTests.java @@ -111,7 +111,7 @@ public class TrackingResultProcessorTests extends ESTestCase { assertThat(resultList.get(0).getFailure(), equalTo(exception)); assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedFailResult.getProcessorTag())); - Map metadata = resultList.get(1).getIngestDocument().getIngestMetadata(); + Map metadata = resultList.get(1).getIngestDocument().getIngestMetadata(); assertThat(metadata.get(ON_FAILURE_MESSAGE_FIELD), equalTo("fail")); assertThat(metadata.get(ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("test")); assertThat(metadata.get(ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("fail")); diff --git a/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java b/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java index b4908846e97..5df0aa1de0c 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java @@ -47,7 +47,7 @@ public class WriteableIngestDocumentTests extends ESTestCase { for (int i = 0; i < numFields; i++) { sourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); } - Map ingestMetadata = new 
HashMap<>(); + Map ingestMetadata = new HashMap<>(); numFields = randomIntBetween(1, 5); for (int i = 0; i < numFields; i++) { ingestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)); @@ -70,7 +70,7 @@ public class WriteableIngestDocumentTests extends ESTestCase { changed = true; } - Map otherIngestMetadata; + Map otherIngestMetadata; if (randomBoolean()) { otherIngestMetadata = new HashMap<>(); numFields = randomIntBetween(1, 5); @@ -103,7 +103,7 @@ public class WriteableIngestDocumentTests extends ESTestCase { for (int i = 0; i < numFields; i++) { sourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); } - Map ingestMetadata = new HashMap<>(); + Map ingestMetadata = new HashMap<>(); numFields = randomIntBetween(1, 5); for (int i = 0; i < numFields; i++) { ingestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)); @@ -131,7 +131,7 @@ public class WriteableIngestDocumentTests extends ESTestCase { Map toXContentDoc = (Map) toXContentMap.get("doc"); Map toXContentSource = (Map) toXContentDoc.get("_source"); - Map toXContentIngestMetadata = (Map) toXContentDoc.get("_ingest"); + Map toXContentIngestMetadata = (Map) toXContentDoc.get("_ingest"); Map metadataMap = ingestDocument.extractMetadata(); for (Map.Entry metadata : metadataMap.entrySet()) { diff --git a/core/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java index be6ec1059d6..aaede49a36d 100644 --- a/core/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java @@ -86,7 +86,7 @@ public class CompoundProcessorTests extends ESTestCase { public void testSingleProcessorWithOnFailureProcessor() throws Exception { TestProcessor processor1 = new TestProcessor("id", "first", ingestDocument -> {throw new 
RuntimeException("error");}); TestProcessor processor2 = new TestProcessor(ingestDocument -> { - Map ingestMetadata = ingestDocument.getIngestMetadata(); + Map ingestMetadata = ingestDocument.getIngestMetadata(); assertThat(ingestMetadata.size(), equalTo(3)); assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error")); assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("first")); @@ -104,7 +104,7 @@ public class CompoundProcessorTests extends ESTestCase { public void testSingleProcessorWithNestedFailures() throws Exception { TestProcessor processor = new TestProcessor("id", "first", ingestDocument -> {throw new RuntimeException("error");}); TestProcessor processorToFail = new TestProcessor("id2", "second", ingestDocument -> { - Map ingestMetadata = ingestDocument.getIngestMetadata(); + Map ingestMetadata = ingestDocument.getIngestMetadata(); assertThat(ingestMetadata.size(), equalTo(3)); assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error")); assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("first")); @@ -112,7 +112,7 @@ public class CompoundProcessorTests extends ESTestCase { throw new RuntimeException("error"); }); TestProcessor lastProcessor = new TestProcessor(ingestDocument -> { - Map ingestMetadata = ingestDocument.getIngestMetadata(); + Map ingestMetadata = ingestDocument.getIngestMetadata(); assertThat(ingestMetadata.size(), equalTo(3)); assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error")); assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("second")); @@ -131,7 +131,7 @@ public class CompoundProcessorTests extends ESTestCase { public void testCompoundProcessorExceptionFailWithoutOnFailure() throws Exception { TestProcessor firstProcessor = new TestProcessor("id1", "first", ingestDocument -> {throw new RuntimeException("error");}); 
TestProcessor secondProcessor = new TestProcessor("id3", "second", ingestDocument -> { - Map ingestMetadata = ingestDocument.getIngestMetadata(); + Map ingestMetadata = ingestDocument.getIngestMetadata(); assertThat(ingestMetadata.entrySet(), hasSize(3)); assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error")); assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("first")); @@ -153,7 +153,7 @@ public class CompoundProcessorTests extends ESTestCase { TestProcessor failProcessor = new TestProcessor("tag_fail", "fail", ingestDocument -> {throw new RuntimeException("custom error message");}); TestProcessor secondProcessor = new TestProcessor("id3", "second", ingestDocument -> { - Map ingestMetadata = ingestDocument.getIngestMetadata(); + Map ingestMetadata = ingestDocument.getIngestMetadata(); assertThat(ingestMetadata.entrySet(), hasSize(3)); assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("custom error message")); assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("fail")); @@ -176,7 +176,7 @@ public class CompoundProcessorTests extends ESTestCase { TestProcessor failProcessor = new TestProcessor("tag_fail", "fail", ingestDocument -> {throw new RuntimeException("custom error message");}); TestProcessor secondProcessor = new TestProcessor("id3", "second", ingestDocument -> { - Map ingestMetadata = ingestDocument.getIngestMetadata(); + Map ingestMetadata = ingestDocument.getIngestMetadata(); assertThat(ingestMetadata.entrySet(), hasSize(3)); assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("custom error message")); assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("fail")); diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index 
ee4da02478d..fa146951283 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -907,7 +907,7 @@ public class IngestDocumentTests extends ESTestCase { for (int i = 0; i < numFields; i++) { sourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); } - Map ingestMetadata = new HashMap<>(); + Map ingestMetadata = new HashMap<>(); numFields = randomIntBetween(1, 5); for (int i = 0; i < numFields; i++) { ingestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)); @@ -930,7 +930,7 @@ public class IngestDocumentTests extends ESTestCase { changed = true; } - Map otherIngestMetadata; + Map otherIngestMetadata; if (randomBoolean()) { otherIngestMetadata = new HashMap<>(); numFields = randomIntBetween(1, 5); @@ -962,7 +962,7 @@ public class IngestDocumentTests extends ESTestCase { long before = System.currentTimeMillis(); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); long after = System.currentTimeMillis(); - String timestampString = ingestDocument.getIngestMetadata().get("timestamp"); + String timestampString = (String) ingestDocument.getIngestMetadata().get("timestamp"); assertThat(timestampString, notNullValue()); assertThat(timestampString, endsWith("+0000")); DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZZ", Locale.ROOT); diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 00f25434073..25c42f17870 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -859,8 +859,16 @@ because it is likely that the number of elements in an array is unknown. For thi processor exists. By specifying the field holding array elements and a processor that defines what should happen to each element, array fields can easily be preprocessed. 
-A processor inside the foreach processor works in a different context, and the only valid top-level -field is `_value`, which holds the array element value. Under this field other fields may exist. +A processor inside the foreach processor works in the array element context and puts that in the ingest metadata +under the `_ingest._value` key. If the array element is a json object it holds all immediate fields of that json object, +and if the array element is a scalar value, `_ingest._value` just holds that value. Note that if a processor prior to the +`foreach` processor used the `_ingest._value` key then the specified value will not be available to the processor inside +the `foreach` processor. The `foreach` processor does restore the original value, so that value is available to processors +after the `foreach` processor. + +Note that any other fields from the document are accessible and modifiable like with all other processors. This processor +just puts the current array element being read into the `_ingest._value` ingest metadata attribute, so that it may be +pre-processed. If the `foreach` processor fails to process an element inside the array, and no `on_failure` processor has been specified, then it aborts the execution and leaves the array unmodified. @@ -892,7 +900,7 @@ When this `foreach` processor operates on this sample document: "field" : "values", "processor" : { "uppercase" : { - "field" : "_value" + "field" : "_ingest._value" } } } @@ -936,7 +944,7 @@ so the following `foreach` processor is used: "field" : "persons", "processor" : { "remove" : { - "field" : "_value.id" + "field" : "_ingest._value.id" } } } @@ -959,9 +967,7 @@ After preprocessing the result is: } -------------------------------------------------- -As for any processor, you can define `on_failure` processors -in processors that are wrapped inside the `foreach` processor. - +The wrapped processor can have an `on_failure` definition. For example, the `id` field may not exist on all person objects.
Instead of failing the index request, you can use an `on_failure` block to send the document to the 'failure_index' index for later inspection: diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java index c4640733d06..92804dc0513 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java @@ -62,11 +62,12 @@ public final class ForEachProcessor extends AbstractProcessor { List values = ingestDocument.getFieldValue(field, List.class); List newValues = new ArrayList<>(values.size()); for (Object value : values) { - Map innerSource = new HashMap<>(ingestDocument.getSourceAndMetadata()); - innerSource.put("_value", value); // scalar value to access the list item being evaluated - IngestDocument innerIngestDocument = new IngestDocument(innerSource, ingestDocument.getIngestMetadata()); - processor.execute(innerIngestDocument); - newValues.add(innerSource.get("_value")); + Object previousValue = ingestDocument.getIngestMetadata().put("_value", value); + try { + processor.execute(ingestDocument); + } finally { + newValues.add(ingestDocument.getIngestMetadata().put("_value", previousValue)); + } } ingestDocument.setFieldValue(field, newValues); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java index 714722418e7..3f9ee8bc961 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java @@ -49,7 +49,7 @@ public class ForEachProcessorTests extends ESTestCase { ); ForEachProcessor processor = new 
ForEachProcessor( - "_tag", "values", new UppercaseProcessor("_tag", "_value") + "_tag", "values", new UppercaseProcessor("_tag", "_ingest._value") ); processor.execute(ingestDocument); @@ -65,7 +65,7 @@ public class ForEachProcessorTests extends ESTestCase { ); TestProcessor testProcessor = new TestProcessor(id -> { - if ("c".equals(id.getFieldValue("_value", String.class))) { + if ("c".equals(id.getFieldValue("_ingest._value", String.class))) { throw new RuntimeException("failure"); } }); @@ -80,11 +80,11 @@ public class ForEachProcessorTests extends ESTestCase { assertThat(ingestDocument.getFieldValue("values", List.class), equalTo(Arrays.asList("a", "b", "c"))); testProcessor = new TestProcessor(id -> { - String value = id.getFieldValue("_value", String.class); + String value = id.getFieldValue("_ingest._value", String.class); if ("c".equals(value)) { throw new RuntimeException("failure"); } else { - id.setFieldValue("_value", value.toUpperCase(Locale.ROOT)); + id.setFieldValue("_ingest._value", value.toUpperCase(Locale.ROOT)); } }); Processor onFailureProcessor = new TestProcessor(ingestDocument1 -> {}); @@ -105,9 +105,9 @@ public class ForEachProcessorTests extends ESTestCase { ); TestProcessor innerProcessor = new TestProcessor(id -> { - id.setFieldValue("_value.index", id.getSourceAndMetadata().get("_index")); - id.setFieldValue("_value.type", id.getSourceAndMetadata().get("_type")); - id.setFieldValue("_value.id", id.getSourceAndMetadata().get("_id")); + id.setFieldValue("_ingest._value.index", id.getSourceAndMetadata().get("_index")); + id.setFieldValue("_ingest._value.type", id.getSourceAndMetadata().get("_type")); + id.setFieldValue("_ingest._value.id", id.getSourceAndMetadata().get("_id")); }); ForEachProcessor processor = new ForEachProcessor("_tag", "values", innerProcessor); processor.execute(ingestDocument); @@ -136,7 +136,7 @@ public class ForEachProcessorTests extends ESTestCase { TemplateService ts = TestTemplateService.instance(); 
ForEachProcessor processor = new ForEachProcessor( - "_tag", "values", new SetProcessor("_tag", ts.compile("_value.new_field"), (model) -> model.get("other")) + "_tag", "values", new SetProcessor("_tag", ts.compile("_ingest._value.new_field"), (model) -> model.get("other")) ); processor.execute(ingestDocument); @@ -151,8 +151,8 @@ public class ForEachProcessorTests extends ESTestCase { Processor innerProcessor = new Processor() { @Override public void execute(IngestDocument ingestDocument) throws Exception { - String existingValue = ingestDocument.getFieldValue("_value", String.class); - ingestDocument.setFieldValue("_value", existingValue + "."); + String existingValue = ingestDocument.getFieldValue("_ingest._value", String.class); + ingestDocument.setFieldValue("_ingest._value", existingValue + "."); } @Override @@ -184,4 +184,91 @@ public class ForEachProcessorTests extends ESTestCase { } } + public void testModifyFieldsOutsideArray() throws Exception { + List values = new ArrayList<>(); + values.add("string"); + values.add(1); + values.add(null); + IngestDocument ingestDocument = new IngestDocument( + "_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values", values) + ); + + TemplateService ts = TestTemplateService.instance(); + + ForEachProcessor processor = new ForEachProcessor( + "_tag", "values", new CompoundProcessor(false, + Collections.singletonList(new UppercaseProcessor("_tag_upper", "_ingest._value")), + Collections.singletonList(new AppendProcessor("_tag", + ts.compile("errors"), (model) -> (Collections.singletonList("added")))) + )); + processor.execute(ingestDocument); + + List result = ingestDocument.getFieldValue("values", List.class); + assertThat(result.get(0), equalTo("STRING")); + assertThat(result.get(1), equalTo(1)); + assertThat(result.get(2), equalTo(null)); + + List errors = ingestDocument.getFieldValue("errors", List.class); + assertThat(errors.size(), equalTo(2)); + } + + public void 
testScalarValueAllowsUnderscoreValueFieldToRemainAccessible() throws Exception { + List values = new ArrayList<>(); + values.add("please"); + values.add("change"); + values.add("me"); + Map source = new HashMap<>(); + source.put("_value", "new_value"); + source.put("values", values); + IngestDocument ingestDocument = new IngestDocument( + "_index", "_type", "_id", null, null, null, null, source + ); + + TestProcessor processor = new TestProcessor(doc -> doc.setFieldValue("_ingest._value", + doc.getFieldValue("_source._value", String.class))); + ForEachProcessor forEachProcessor = new ForEachProcessor("_tag", "values", processor); + forEachProcessor.execute(ingestDocument); + + List result = ingestDocument.getFieldValue("values", List.class); + assertThat(result.get(0), equalTo("new_value")); + assertThat(result.get(1), equalTo("new_value")); + assertThat(result.get(2), equalTo("new_value")); + } + + public void testNestedForEach() throws Exception { + List> values = new ArrayList<>(); + List innerValues = new ArrayList<>(); + innerValues.add("abc"); + innerValues.add("def"); + Map value = new HashMap<>(); + value.put("values2", innerValues); + values.add(value); + + innerValues = new ArrayList<>(); + innerValues.add("ghi"); + innerValues.add("jkl"); + value = new HashMap<>(); + value.put("values2", innerValues); + values.add(value); + + IngestDocument ingestDocument = new IngestDocument( + "_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values1", values) + ); + + TestProcessor testProcessor = new TestProcessor( + doc -> doc.setFieldValue("_ingest._value", doc.getFieldValue("_ingest._value", String.class).toUpperCase(Locale.ENGLISH)) + ); + ForEachProcessor processor = new ForEachProcessor( + "_tag", "values1", new ForEachProcessor("_tag", "_ingest._value.values2", testProcessor)); + processor.execute(ingestDocument); + + List result = ingestDocument.getFieldValue("values1.0.values2", List.class); + assertThat(result.get(0), 
equalTo("ABC")); + assertThat(result.get(1), equalTo("DEF")); + + result = ingestDocument.getFieldValue("values1.1.values2", List.class); + assertThat(result.get(0), equalTo("GHI")); + assertThat(result.get(1), equalTo("JKL")); + } + } diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/80_foreach.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/80_foreach.yaml index 48080433fd2..ab2be3bf81c 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/80_foreach.yaml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/80_foreach.yaml @@ -19,7 +19,7 @@ teardown: "field" : "values", "processor" : { "uppercase" : { - "field" : "_value" + "field" : "_ingest._value" } } } diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yaml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yaml index b0a729a6299..0e54ff0b7ad 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yaml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yaml @@ -234,7 +234,7 @@ "processor": { "append": { "field": "values_flat", - "value": "{{_value.key}}_{{_value.value}}" + "value": "{{_ingest._value.key}}_{{_ingest._value.value}}" } } } diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yaml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yaml index 9a52979b930..1a29531651b 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yaml +++ 
b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yaml @@ -84,7 +84,7 @@ "field" : "friends", "processor" : { "remove" : { - "field" : "_value.id" + "field" : "_ingest._value.id" } } } @@ -106,7 +106,7 @@ "field" : "address", "processor" : { "trim" : { - "field" : "_value" + "field" : "_ingest._value" } } } From 3f344d3154f29d8c873d8002570d65b4e2bdb0ee Mon Sep 17 00:00:00 2001 From: Colin Goodheart-Smithe Date: Wed, 27 Jul 2016 08:48:35 +0100 Subject: [PATCH 90/93] [DOCS] fix documentation for selecting algorithm for percentiles agg --- .../metrics/percentile-aggregation.asciidoc | 23 +++++++++++-------- .../percentile-rank-aggregation.asciidoc | 20 ++++++++-------- 2 files changed, 23 insertions(+), 20 deletions(-) diff --git a/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc b/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc index dc8c8837344..db15d0a6a66 100644 --- a/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc @@ -190,7 +190,9 @@ This balance can be controlled using a `compression` parameter: "load_time_outlier" : { "percentiles" : { "field" : "load_time", - "compression" : 200 <1> + "tdigest": { + "compression" : 200 <1> + } } } } @@ -218,11 +220,11 @@ the TDigest will use less memory. experimental[] -https://github.com/HdrHistogram/HdrHistogram[HDR Histogram] (High Dynamic Range Histogram) is an alternative implementation -that can be useful when calculating percentiles for latency measurements as it can be faster than the t-digest implementation -with the trade-off of a larger memory footprint. This implementation maintains a fixed worse-case percentage error (specified -as a number of significant digits). 
This means that if data is recorded with values from 1 microsecond up to 1 hour -(3,600,000,000 microseconds) in a histogram set to 3 significant digits, it will maintain a value resolution of 1 microsecond +https://github.com/HdrHistogram/HdrHistogram[HDR Histogram] (High Dynamic Range Histogram) is an alternative implementation +that can be useful when calculating percentiles for latency measurements as it can be faster than the t-digest implementation +with the trade-off of a larger memory footprint. This implementation maintains a fixed worse-case percentage error (specified +as a number of significant digits). This means that if data is recorded with values from 1 microsecond up to 1 hour +(3,600,000,000 microseconds) in a histogram set to 3 significant digits, it will maintain a value resolution of 1 microsecond for values up to 1 millisecond and 3.6 seconds (or better) for the maximum tracked value (1 hour). The HDR Histogram can be used by specifying the `method` parameter in the request: @@ -235,17 +237,18 @@ The HDR Histogram can be used by specifying the `method` parameter in the reques "percentiles" : { "field" : "load_time", "percents" : [95, 99, 99.9], - "method" : "hdr", <1> - "number_of_significant_value_digits" : 3 <2> + "hdr": { <1> + "number_of_significant_value_digits" : 3 <2> + } } } } } -------------------------------------------------- -<1> The `method` parameter is set to `hdr` to indicate that HDR Histogram should be used to calculate the percentiles +<1> `hdr` object indicates that HDR Histogram should be used to calculate the percentiles and specific settings for this algorithm can be specified inside the object <2> `number_of_significant_value_digits` specifies the resolution of values for the histogram in number of significant digits -The HDRHistogram only supports positive values and will error if it is passed a negative value. 
It is also not a good idea to use +The HDRHistogram only supports positive values and will error if it is passed a negative value. It is also not a good idea to use the HDRHistogram if the range of values is unknown as this could lead to high memory usage. ==== Missing value diff --git a/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc b/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc index dcb953ae252..d4df92105de 100644 --- a/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc @@ -115,11 +115,11 @@ TIP: for indexed scripts replace the `file` parameter with an `id` parameter. experimental[] -https://github.com/HdrHistogram/HdrHistogram[HDR Histogram] (High Dynamic Range Histogram) is an alternative implementation -that can be useful when calculating percentile ranks for latency measurements as it can be faster than the t-digest implementation -with the trade-off of a larger memory footprint. This implementation maintains a fixed worse-case percentage error (specified as a -number of significant digits). This means that if data is recorded with values from 1 microsecond up to 1 hour (3,600,000,000 -microseconds) in a histogram set to 3 significant digits, it will maintain a value resolution of 1 microsecond for values up to +https://github.com/HdrHistogram/HdrHistogram[HDR Histogram] (High Dynamic Range Histogram) is an alternative implementation +that can be useful when calculating percentile ranks for latency measurements as it can be faster than the t-digest implementation +with the trade-off of a larger memory footprint. This implementation maintains a fixed worse-case percentage error (specified as a +number of significant digits). 
This means that if data is recorded with values from 1 microsecond up to 1 hour (3,600,000,000 +microseconds) in a histogram set to 3 significant digits, it will maintain a value resolution of 1 microsecond for values up to 1 millisecond and 3.6 seconds (or better) for the maximum tracked value (1 hour). The HDR Histogram can be used by specifying the `method` parameter in the request: @@ -132,17 +132,18 @@ The HDR Histogram can be used by specifying the `method` parameter in the reques "percentile_ranks" : { "field" : "load_time", "values" : [15, 30], - "method" : "hdr", <1> - "number_of_significant_value_digits" : 3 <2> + "hdr": { <1> + "number_of_significant_value_digits" : 3 <2> + } } } } } -------------------------------------------------- -<1> The `method` parameter is set to `hdr` to indicate that HDR Histogram should be used to calculate the percentile_ranks +<1> `hdr` object indicates that HDR Histogram should be used to calculate the percentile ranks and specific settings for this algorithm can be specified inside the object <2> `number_of_significant_value_digits` specifies the resolution of values for the histogram in number of significant digits -The HDRHistogram only supports positive values and will error if it is passed a negative value. It is also not a good idea to use +The HDRHistogram only supports positive values and will error if it is passed a negative value. It is also not a good idea to use the HDRHistogram if the range of values is unknown as this could lead to high memory usage. ==== Missing value @@ -166,4 +167,3 @@ had a value. -------------------------------------------------- <1> Documents without a value in the `grade` field will fall into the same bucket as documents that have the value `10`. - From 2fdf79d8d4c41a97da22576adddf217def883fec Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 26 Jul 2016 20:05:47 +0200 Subject: [PATCH 91/93] Deprecate template query. 
Closes #19390 --- docs/reference/migration/migrate_5_0/scripting.asciidoc | 7 ++++++- .../script/mustache/TemplateQueryBuilder.java | 7 +++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/docs/reference/migration/migrate_5_0/scripting.asciidoc b/docs/reference/migration/migrate_5_0/scripting.asciidoc index 3b44f899f59..0f50e6232c3 100644 --- a/docs/reference/migration/migrate_5_0/scripting.asciidoc +++ b/docs/reference/migration/migrate_5_0/scripting.asciidoc @@ -309,4 +309,9 @@ transportClient.addTransportAddress( -------------------------------------------------- Also the helper methods in `QueryBuilders` class that create a `TemplateQueryBuilder` instance have been removed, -instead the constructors on `TemplateQueryBuilder` should be used. \ No newline at end of file +instead the constructors on `TemplateQueryBuilder` should be used. + +==== Template query + +The `template` query has been deprecated in favour of the search template api. The `template` query is scheduled +to be removed in the next major version. 
\ No newline at end of file diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java index 297ff841a1f..eb32a5024d1 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java @@ -23,6 +23,8 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -47,8 +49,12 @@ import java.util.Optional; /** * Facilitates creating template query requests. * */ +@Deprecated +// TODO remove this class in 6.0 public class TemplateQueryBuilder extends AbstractQueryBuilder { + public static final String NAME = "template"; + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(TemplateQueryBuilder.class)); /** Template to fill. */ private final Script template; @@ -63,6 +69,7 @@ public class TemplateQueryBuilder extends AbstractQueryBuilder Date: Wed, 27 Jul 2016 10:01:36 +0200 Subject: [PATCH 92/93] removed useless comment and make sure all public constructors delegate to the package protected constructor. 
--- .../elasticsearch/script/mustache/TemplateQueryBuilder.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java index eb32a5024d1..4a19bcb4b93 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java @@ -60,14 +60,13 @@ public class TemplateQueryBuilder extends AbstractQueryBuilder params) { - this.template = new Script(template, scriptType, "mustache", params); + this(new Script(template, scriptType, "mustache", params)); } public TemplateQueryBuilder(String template, ScriptService.ScriptType scriptType, Map params, XContentType ct) { - this.template = new Script(template, scriptType, "mustache", params, ct); + this(new Script(template, scriptType, "mustache", params, ct)); } - // for tests, so that mock script can be used: TemplateQueryBuilder(Script template) { DEPRECATION_LOGGER.deprecated("[{}] query is deprecated, use search template api instead", NAME); if (template == null) { From 4162582ee894261f2c16d38e86cf7088d44d5f0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 27 Jul 2016 11:39:59 +0200 Subject: [PATCH 93/93] Adapt to renaming of RestTestCandidate --- .../elasticsearch/action/quality/RankEvalRestIT.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/action/quality/RankEvalRestIT.java b/modules/rank-eval/src/test/java/org/elasticsearch/action/quality/RankEvalRestIT.java index 5a7c92b97bc..dc33d6ac439 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/action/quality/RankEvalRestIT.java +++ 
b/modules/rank-eval/src/test/java/org/elasticsearch/action/quality/RankEvalRestIT.java @@ -22,19 +22,19 @@ package org.elasticsearch.action.quality; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; public class RankEvalRestIT extends ESClientYamlSuiteTestCase { - public RankEvalRestIT(@Name("yaml") RestTestCandidate testCandidate) { + public RankEvalRestIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { + public static Iterable parameters() throws IOException, ClientYamlTestParseException { return ESClientYamlSuiteTestCase.createParameters(0, 1); } }