Compare commits


126 Commits
main ... 4.1.15

Author SHA1 Message Date
Jens Schauder
0491165f22
Release version 4.1.15 (2020.0.15).
See #1963
2021-11-12 09:37:00 +01:00
Jens Schauder
b8ea301a1f
Prepare 4.1.15 (2020.0.15).
See #1963
2021-11-12 09:36:36 +01:00
jongchan lee
2b3f1730dc
Add scrolltime condition when using SearchRequest.
Original Pull Request #1975
Closes #1974

(cherry picked from commit f1b4a54bc23ee3944462c640966672b6310eebf8)
(cherry picked from commit c1ab4d66e06587295c2157b0cc0df4df5bdeb662)
2021-10-26 18:51:55 +02:00
Mark Paluch
8d984cdd04
After release cleanups.
See #1935
2021-10-18 11:02:39 +02:00
Mark Paluch
ee64017b1a
Prepare next development iteration.
See #1935
2021-10-18 11:02:36 +02:00
Mark Paluch
f322caa110
Release version 4.1.14 (2020.0.14).
See #1935
2021-10-18 10:53:53 +02:00
Mark Paluch
992d86f980
Prepare 4.1.14 (2020.0.14).
See #1935
2021-10-18 10:53:24 +02:00
Mark Paluch
ef52222be8
After release cleanups.
See #1897
2021-09-17 09:04:20 +02:00
Mark Paluch
5a7f7594da
Prepare next development iteration.
See #1897
2021-09-17 09:04:17 +02:00
Mark Paluch
eb4c8e02f7
Release version 4.1.13 (2020.0.13).
See #1897
2021-09-17 08:53:40 +02:00
Mark Paluch
81791dd68d
Prepare 4.1.13 (2020.0.13).
See #1897
2021-09-17 08:53:11 +02:00
Peter-Josef Meisch
e066c10a30
Polishing
(cherry picked from commit 6941e31ba4a735086b165024182ad1cfaa664455)
(cherry picked from commit 30bc91c7539f500a03a182526a8bcaf687e18f07)
2021-09-10 08:59:05 +02:00
Nic Hines
ea4cc07146
Change mapping of connectionRequestTimeout to ConnPool leaseTimeout.
Original Pull Request: #1925
Closes: #1926

(cherry picked from commit 3b8f0c9d567d4c91ae937e6b87752d81cda4c0a7)
(cherry picked from commit 92806d2e1194bc470244deae8271b0e74ca3288b)
2021-09-10 08:59:05 +02:00
Peter-Josef Meisch
aca34eefeb
Fix @Query method implementation for unpaged queries.
Original Pull Request #1919
Closes #1917

(cherry picked from commit e71758686c35b6ad22e9e952f95a4aabd0909c43)
(cherry picked from commit 2dd0a6771fa955543c350b431bd043dd5083d99a)
2021-09-03 22:25:52 +02:00
Jens Schauder
1348ee3a92
After release cleanups.
See #1874
2021-08-12 10:36:58 +02:00
Jens Schauder
e010257c7d
Prepare next development iteration.
See #1874
2021-08-12 10:36:56 +02:00
Jens Schauder
a1aa6f209a
Release version 4.1.12 (2020.0.12).
See #1874
2021-08-12 10:25:55 +02:00
Jens Schauder
a19aa54619
Prepare 4.1.12 (2020.0.12).
See #1874
2021-08-12 10:25:34 +02:00
Peter-Josef Meisch
65aa371d64
Fix NPE on IndexQuery with source and version.
Original Pull Request #1894
Closes #1893

(cherry picked from commit 36b449c3852fee3b4cdeb0760058c4265d8a176c)
(cherry picked from commit c0781efbaad16bb4d9a943f9692bf6b356609bda)
2021-08-06 22:58:47 +02:00
Peter-Josef Meisch
b3da1786ca
Fix http URL in license header
(cherry picked from commit e6869bcdfdfc5dd26477bea76b4920ddf73d1883)
(cherry picked from commit d371404f900075f70bf3b71b1a8b761a9e08991b)
2021-07-22 07:49:49 +02:00
Peter-Josef Meisch
9921b46781
Upgrade maven wrapper to use maven 3.8.1.
Original Pull Request #1878
Closes #1877

(cherry picked from commit d2e3ea26b80aaf661363a920254350abe915c36c)
(cherry picked from commit cc5b4fa635ffc6461af82e9e178bf1b0ffc3ad32)
2021-07-22 07:49:48 +02:00
Peter-Josef Meisch
aa78e4f026
Polishing.
(cherry picked from commit d3e8c9fce5f788cc1d5a6289eda6ce34325234fe)
(cherry picked from commit deae205fd41a80ea051d05fc2bfc50893baee702)
2021-07-17 19:47:34 +02:00
Frnandu Martinski
67cab66062
Fix uri encode bug when url path start with '/'.
Original Pull Request #1873
Closes #1870

(cherry picked from commit d88fb037dadc6798641d51876d81ea64c43bf36b)
(cherry picked from commit 796a5ebe34f588efa4ad83932149fc4081e248a4)
2021-07-17 19:47:33 +02:00
Jens Schauder
71f5fa23b7
After release cleanups.
See #1849
2021-07-16 10:45:20 +02:00
Jens Schauder
59075075c5
Prepare next development iteration.
See #1849
2021-07-16 10:45:18 +02:00
Jens Schauder
208824dd9f
Release version 4.1.11 (2020.0.11).
See #1849
2021-07-16 10:19:53 +02:00
Jens Schauder
cff246ba14
Prepare 4.1.11 (2020.0.11).
See #1849
2021-07-16 10:18:58 +02:00
Jens Schauder
fddb0af117
Updated changelog.
See #1849
2021-07-16 10:18:56 +02:00
Peter-Josef Meisch
626b274677
Use registered converters for parameters of @Query annotated methods.
Original Pull Request #1867
Closes #1866

(cherry picked from commit 27094724dcf1a9f3932c86adff6cb58341ec2016)
(cherry picked from commit 303438ae6398bc02f06115e752791e5ef896939f)
2021-07-14 20:34:51 +02:00
Niklas Herder
979c164135
Support collection parameters in @Query methods.
Original Pull Request #1856
Closes #1858

(cherry picked from commit 6f84a1c589998ffb47b8a0f97446073ecaa396b8)
(cherry picked from commit 254948d1c9ca3e517f26906c6fe79c6b368e6921)
2021-07-03 18:21:36 +02:00
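For readers unfamiliar with the feature, a minimal sketch of what such a repository method can look like — the entity, index name, and query below are illustrative assumptions, not code from the pull request:

import java.util.Collection;
import java.util.List;

import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Query;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;

// Hypothetical entity used by the repository below.
@Document(indexName = "books")
class Book {
    @Id private String id;
    private String name;
}

// The ?0 placeholder is bound to the collection argument, which is rendered
// as a JSON array inside the terms query.
interface BookRepository extends ElasticsearchRepository<Book, String> {

    @Query("{\"terms\": {\"name\": ?0}}")
    List<Book> findByNames(Collection<String> names);
}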
Sascha Woo
84a74f5921
Add missing hashCode and equals methods to JoinField.
Original Pull Request #1847
Closes #1846

(cherry picked from commit a16a87f3fa42c96566fac06e89aa703767a4842e)
(cherry picked from commit 0bb239a67465fd1304d0f318690b301cd6f3b4fe)
2021-06-23 20:50:29 +02:00
Mark Paluch
58925fca98
Updated changelog.
See #1814
2021-06-22 16:07:30 +02:00
Mark Paluch
fc32e0e57a
After release cleanups.
See #1813
2021-06-22 15:28:52 +02:00
Mark Paluch
98ca6a1755
Prepare next development iteration.
See #1813
2021-06-22 15:28:49 +02:00
Mark Paluch
d4962c11fc
Release version 4.1.10 (2020.0.10).
See #1813
2021-06-22 15:18:24 +02:00
Mark Paluch
698684c4bd
Prepare 4.1.10 (2020.0.10).
See #1813
2021-06-22 15:17:56 +02:00
Mark Paluch
a67a700fa8
Updated changelog.
See #1813
2021-06-22 15:17:52 +02:00
Peter-Josef Meisch
93cf9ab794
Pageable results and @Query annotation.
Original Pull Request #1844
Closes #1843
2021-06-15 22:13:39 +02:00
Peter-Josef Meisch
73f11a0618
Adapt XNamedContents used by ReactiveElasticsearchClient for missing entries (terms and aggregations).
Original Pull Request #1837
Closes #1834

(cherry picked from commit 38dc7fb0fb493f0944985bcfd809ebefc3d12d45)
(cherry picked from commit 45a0e2213f13a74e8d27ded49a403f4b37b2f5b9)
2021-06-02 22:57:15 +02:00
Mark Paluch
038401f6ce
Updated changelog.
See #1775
2021-05-14 12:36:43 +02:00
Mark Paluch
8fcf9e2140
After release cleanups.
See #1774
2021-05-14 12:05:22 +02:00
Mark Paluch
cc87eaf602
Prepare next development iteration.
See #1774
2021-05-14 12:05:18 +02:00
Mark Paluch
a5d0347224
Release version 4.1.9 (2020.0.9).
See #1774
2021-05-14 11:52:25 +02:00
Mark Paluch
745c69ca39
Prepare 4.1.9 (2020.0.9).
See #1774
2021-05-14 11:51:55 +02:00
Mark Paluch
12365c19cf
Updated changelog.
See #1774
2021-05-14 11:51:51 +02:00
Peter-Josef Meisch
26a3b324b7
SearchPage result in StringQuery methods.
Original Pull Request #1812
Closes #1811

(cherry picked from commit e96d09fa51e7ee8450513657f51dbd55c4a54641)
(cherry picked from commit ad6022f64ca6071e1d139ae97c39b703570b49d2)
2021-05-14 06:55:47 +02:00
Peter-Josef Meisch
752693399e
Escape strings with quotes in custom query parameters.
Original Pull Request #1793
Closes #1790

(cherry picked from commit f8fbf7721a23d291346bf500bdeb83ff36ef6559)
(cherry picked from commit 40972b21e02e47d2b13a39a558f908e1f8298e35)
2021-04-29 06:33:15 +02:00
Peter-Josef Meisch
2cb9e30b61
Search with MoreLikeThisQuery should use Pageable.
Original Pull Request #1789
Closes #1787

(cherry picked from commit a2ca312fb2812bd34781206e47be31e9e43dac00)
(cherry picked from commit 85af54635d77b37bfa25178b571404d335a87ead)

# Conflicts:
#	src/test/java/org/springframework/data/elasticsearch/core/ElasticsearchTemplateTests.java
2021-04-26 22:45:02 +02:00
Peter-Josef Meisch
461d70e173
Fix documentation.
Original Pull Request #1786
Closes #1785

(cherry picked from commit 8b7f0f8327a30f0c3017d500564344a8bce8a0b2)
2021-04-23 17:52:52 +02:00
Greg L. Turnquist
352e74248a
Authenticate with artifactory.
See #1750.
2021-04-20 10:43:26 -05:00
Peter-Josef Meisch
73d52cd686
DynamicMapping annotation should be applicable to any object field.
Original Pull Request #1779
Closes #1767
2021-04-17 20:01:08 +02:00
Mark Paluch
532bb85d77
Updated changelog.
See #1750
2021-04-14 14:40:05 +02:00
Mark Paluch
f6e17d1af3
After release cleanups.
See #1751
2021-04-14 11:42:10 +02:00
Mark Paluch
62f483cd2c
Prepare next development iteration.
See #1751
2021-04-14 11:42:06 +02:00
Mark Paluch
6f8ad00dfa
Release version 4.1.8 (2020.0.8).
See #1751
2021-04-14 11:33:27 +02:00
Mark Paluch
e2fbef632c
Prepare 4.1.8 (2020.0.8).
See #1751
2021-04-14 11:32:49 +02:00
Mark Paluch
d131ccafa2
Updated changelog.
See #1751
2021-04-14 11:32:46 +02:00
Mark Paluch
458b05ed37
Updated changelog.
See #1730
2021-04-14 11:17:44 +02:00
Peter-Josef Meisch
1c9f3d2f9a
Fix reactive connection handling.
Original Pull Request #1766
Closes #1759

(cherry picked from commit 58bca88386d9de7ea3946f7691c63bf31ce4ece2)
2021-04-08 23:11:35 +02:00
Mark Paluch
2b2bc3e575
After release cleanups.
See #1731
2021-03-31 18:29:42 +02:00
Mark Paluch
5bd0f9cc2d
Prepare next development iteration.
See #1731
2021-03-31 18:29:39 +02:00
Mark Paluch
589961022d
Release version 4.1.7 (2020.0.7).
See #1731
2021-03-31 18:19:53 +02:00
Mark Paluch
851525a052
Prepare 4.1.7 (2020.0.7).
See #1731
2021-03-31 18:19:21 +02:00
Mark Paluch
1bc42a9a9a
Updated changelog.
See #1731
2021-03-31 18:19:17 +02:00
Mark Paluch
971dcda1f5
Updated changelog.
See #1699
2021-03-31 17:26:10 +02:00
Mark Paluch
93d03830e0
Updated changelog.
See #1709
2021-03-17 11:31:33 +01:00
Mark Paluch
16ce9444da
After release cleanups.
See #1702
2021-03-17 11:02:16 +01:00
Mark Paluch
d59c32d29e
Prepare next development iteration.
See #1702
2021-03-17 11:02:14 +01:00
Mark Paluch
5713e5bfa6
Release version 4.1.6 (2020.0.6).
See #1702
2021-03-17 10:54:13 +01:00
Mark Paluch
56a2548156
Prepare 4.1.6 (2020.0.6).
See #1702
2021-03-17 10:53:44 +01:00
Mark Paluch
91ff345cf2
Updated changelog.
See #1702
2021-03-17 10:53:42 +01:00
Mark Paluch
168242fbd0
Updated changelog.
See #1697
2021-03-17 10:35:17 +01:00
Peter-Josef Meisch
138760dad7
DefaultReactiveElasticsearchClient handle 5xx error with empty body
Original Pull Request #1713
Closes #1712

(cherry picked from commit 6634d0075ace745e17d34d655e15d21abc0fb786)
2021-03-03 06:27:37 +01:00
Christoph Strobl
4bb6cb6f86 Updated changelog.
See #1701
2021-02-18 11:37:51 +01:00
Christoph Strobl
09af4f9917 After release cleanups.
See #1698
2021-02-18 11:12:46 +01:00
Christoph Strobl
cdd0df7dd4 Prepare next development iteration.
See #1698
2021-02-18 11:12:45 +01:00
Christoph Strobl
3fee02c17e Release version 4.1.5 (2020.0.5).
See #1698
2021-02-18 10:59:16 +01:00
Christoph Strobl
a92b57aedd Prepare 4.1.5 (2020.0.5).
See #1698
2021-02-18 10:58:50 +01:00
Christoph Strobl
0b6a77be3b Updated changelog.
See #1698
2021-02-18 10:58:48 +01:00
Christoph Strobl
9a2b3b1317 Updated changelog.
See #1643
2021-02-17 14:20:38 +01:00
Christoph Strobl
171ea62b9b After release cleanups.
See #1642
2021-02-17 13:41:55 +01:00
Christoph Strobl
3b2ff95702 Prepare next development iteration.
See #1642
2021-02-17 13:41:54 +01:00
Christoph Strobl
51fb872bea Release version 4.1.4 (2020.0.4).
See #1642
2021-02-17 12:00:24 +01:00
Christoph Strobl
67c570606a Prepare 4.1.4 (2020.0.4).
See #1642
2021-02-17 11:59:37 +01:00
Christoph Strobl
eb311d089f Updated changelog.
See #1642
2021-02-17 11:59:36 +01:00
Christoph Strobl
93492f86da Updated changelog.
See #1570
2021-02-17 11:34:29 +01:00
Christoph Strobl
88b2fa970d Updated changelog.
See #1569
2021-02-17 10:58:28 +01:00
Peter-Josef Meisch
11e4ebf12c
Allow CustomConversions for entities - adaption for 4.1.x. 2021-01-29 11:34:11 +01:00
Peter-Josef Meisch
1272fdc188
Allow CustomConversions for entities.
Original Pull Request #1672
Closes #1667

(cherry picked from commit 0ac1b4af00b14cb9509986ab13db0eab44dba4ab)
2021-01-29 11:09:34 +01:00
Peter-Josef Meisch
96eac3ba73
ReactiveElasticsearchOperations indexName is encoded twice.
Original Pull Request #1666
Closes #1665

(cherry picked from commit 4829b07e53fcbea4b391a6688fd70a580f5a62ab)
2021-01-25 21:37:17 +01:00
Peter-Josef Meisch
c47d284e6b
Fix source filter setup in multiget requests.
Original Pull Request #1664
Closes #1659

(cherry picked from commit 1a02c1e05ae9cfa81b9010dd6872d0c348466399)
2021-01-24 19:59:26 +01:00
Peter-Josef Meisch
f5d651b497
Documentation fix.
Original Pull Request #1663
Closes #1662

(cherry picked from commit 1aabb42355e07f9d6e65a2a0d02569b3a0f01a2d)
2021-01-23 20:10:27 +01:00
Peter-Josef Meisch
d35d38f7d5
GeoJson types can be lowercase in Elasticsearch.
Original Pull Request #1657
Closes #1655

(cherry picked from commit 159520d00186d4016bb681eb202a47d26ab68103)
2021-01-19 20:29:02 +01:00
Christoph Strobl
2883280b00 Updated changelog.
See #1571
2021-01-13 15:49:53 +01:00
Christoph Strobl
fedd97624e After release cleanups.
See #1572
2021-01-13 15:01:50 +01:00
Christoph Strobl
9fab24d5ef Prepare next development iteration.
See #1572
2021-01-13 15:01:49 +01:00
Christoph Strobl
bf32581105 Release version 4.1.3 (2020.0.3).
See #1572
2021-01-13 14:18:34 +01:00
Christoph Strobl
7f89d79b12 Prepare 4.1.3 (2020.0.3).
See #1572
2021-01-13 14:17:43 +01:00
Christoph Strobl
955eb77add Updated changelog.
See #1572
2021-01-13 14:17:43 +01:00
Peter-Josef Meisch
0086d35e54
#1634 - Update Testcontainers dependency.
Original Pull Request: #1635
Closes #1634

(cherry picked from commit 6913d8045bd6e9cc6dbeacccc7756fad987f30e6)
2021-01-07 22:13:38 +01:00
Greg L. Turnquist
53f0e79990
DATAES-996 - Use Docker hub credentials for all CI jobs. 2020-12-17 08:41:33 -06:00
Mark Paluch
3da9eaadc5
DATAES-973 - After release cleanups. 2020-12-09 16:46:55 +01:00
Mark Paluch
310f6f0f22
DATAES-973 - Prepare next development iteration. 2020-12-09 16:46:49 +01:00
Mark Paluch
262af9f0e8
DATAES-973 - Release version 4.1.2 (2020.0.2). 2020-12-09 16:01:27 +01:00
Mark Paluch
bc74126d7e
DATAES-973 - Prepare 4.1.2 (2020.0.2). 2020-12-09 16:00:56 +01:00
Mark Paluch
308de7f8db
DATAES-973 - Updated changelog. 2020-12-09 16:00:51 +01:00
Mark Paluch
ae5f72fb62
DATAES-966 - Updated changelog. 2020-12-09 15:33:31 +01:00
Mark Paluch
170facc3f3
DATAES-964 - Updated changelog. 2020-12-09 12:42:33 +01:00
Mark Paluch
0aaffebf16
DATAES-963 - Updated changelog. 2020-12-09 09:59:18 +01:00
Peter-Josef Meisch
d1da6ac4ed
DATAES-543 - Adjust configuration support classes so they do not require proxying.
Original PR: #557

(cherry picked from commit 54727229e1bfc8e88fdf45025b286c1e9bf29cfa)
2020-12-08 20:36:11 +01:00
Peter-Josef Meisch
133bc315ed
DATAES-990 - Index creation fails with Authentication object cannot be null on startup.
Only do a SpEL resolution if there is a SpEL expression in the index name; resolve ExpressionDependencies.

Original PR: #565

(cherry picked from commit 6edb8353b5ccfe6ddf4fb28d6450e090e1373ed0)
2020-12-05 12:22:31 +01:00
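For context, a rough sketch of the approach described in that message — evaluate SpEL only when the configured index name actually contains a template expression, so plain index names never trigger evaluation. The class and method names are illustrative, not the actual implementation:

import org.springframework.expression.Expression;
import org.springframework.expression.common.TemplateParserContext;
import org.springframework.expression.spel.standard.SpelExpressionParser;

// Hypothetical helper: plain names are returned unchanged; only names containing
// the "#{...}" template prefix are parsed and evaluated as SpEL.
class IndexNameResolver {

    private static final SpelExpressionParser PARSER = new SpelExpressionParser();
    private static final TemplateParserContext TEMPLATE = new TemplateParserContext(); // "#{" ... "}"

    String resolve(String indexName) {
        if (!indexName.contains(TEMPLATE.getExpressionPrefix())) {
            return indexName; // no SpEL expression present, use the name as-is
        }
        Expression expression = PARSER.parseExpression(indexName, TEMPLATE);
        return expression.getValue(String.class);
    }
}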
Peter-Josef Meisch
9f243fd2c9
DATAES-991 - Wrong value for TermVector(with_positions_offets_payloads).
Original PR: #564

(cherry picked from commit 6a6ead5e1ec866812f7bf44af77e587851402ad1)
2020-12-04 08:41:46 +01:00
Peter-Josef Meisch
de57159c7f
DATAES-987 - IndexOperations getMapping fail when using index alias.
Original PR: #560

(cherry picked from commit 7912ae977945846347acbeb8a223ccd3fa64158b)
2020-11-26 07:22:41 +01:00
Peter-Josef Meisch
74ed69877d
DATAES-978 - Accept DateFormat.none for a date property to enable custom Converters.
Original PR: #556

(cherry picked from commit 04ceed29054d167a8b1bff90760f9d79048cd445)
2020-11-19 23:14:22 +01:00
Peter-Josef Meisch
7314bfc21d DATAES-972 - BeforeConvertCallback should be called before index query is built.
Original PR: #555

(cherry picked from commit 98043348f7d466fe52ad933e4d90421fa756329f)
2020-11-16 13:44:37 +01:00
Peter-Josef Meisch
5909c19ead
DATAES-977 - Fix versions in reference documentation for 4.1. 2020-11-12 19:13:57 +01:00
Mark Paluch
9df27ef289
DATAES-965 - After release cleanups. 2020-11-11 12:14:54 +01:00
Mark Paluch
28d92359b2
DATAES-965 - Prepare next development iteration. 2020-11-11 12:14:51 +01:00
Mark Paluch
d8b0d526b4
DATAES-965 - Release version 4.1.1 (2020.0.1). 2020-11-11 11:59:00 +01:00
Mark Paluch
dc832e75a6
DATAES-965 - Prepare 4.1.1 (2020.0.1). 2020-11-11 11:58:36 +01:00
Mark Paluch
866cd96477
DATAES-965 - Updated changelog. 2020-11-11 11:58:33 +01:00
Peter-Josef Meisch
162c57df31
DATAES-969 - Use ResultProcessor in ElasticsearchPartQuery to build PartTree.
Original PR: #546

(cherry picked from commit d036693f0510748537c682a5ede99c23938b5250)
2020-11-07 18:28:59 +01:00
Mark Paluch
3600452796
DATAES-968 - Enable Maven caching for Jenkins jobs. 2020-10-30 08:36:23 +01:00
Mark Paluch
4b1c4c8000
DATAES-950 - Enable maintenance branch build. 2020-10-29 09:53:38 +01:00
Mark Paluch
70d556e526
DATAES-950 - After release cleanups. 2020-10-28 16:10:55 +01:00
Mark Paluch
214d91f3c1
DATAES-950 - Prepare next development iteration. 2020-10-28 16:10:52 +01:00
73 changed files with 2664 additions and 663 deletions

117
.mvn/wrapper/MavenWrapperDownloader.java vendored Normal file

@ -0,0 +1,117 @@
/*
* Copyright 2007-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.net.*;
import java.io.*;
import java.nio.channels.*;
import java.util.Properties;
public class MavenWrapperDownloader {
private static final String WRAPPER_VERSION = "0.5.6";
/**
* Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
*/
private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
+ WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";
/**
* Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
* use instead of the default one.
*/
private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
".mvn/wrapper/maven-wrapper.properties";
/**
* Path where the maven-wrapper.jar will be saved to.
*/
private static final String MAVEN_WRAPPER_JAR_PATH =
".mvn/wrapper/maven-wrapper.jar";
/**
* Name of the property which should be used to override the default download url for the wrapper.
*/
private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
public static void main(String args[]) {
System.out.println("- Downloader started");
File baseDirectory = new File(args[0]);
System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());
// If the maven-wrapper.properties exists, read it and check if it contains a custom
// wrapperUrl parameter.
File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
String url = DEFAULT_DOWNLOAD_URL;
if(mavenWrapperPropertyFile.exists()) {
FileInputStream mavenWrapperPropertyFileInputStream = null;
try {
mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
Properties mavenWrapperProperties = new Properties();
mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
} catch (IOException e) {
System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
} finally {
try {
if(mavenWrapperPropertyFileInputStream != null) {
mavenWrapperPropertyFileInputStream.close();
}
} catch (IOException e) {
// Ignore ...
}
}
}
System.out.println("- Downloading from: " + url);
File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
if(!outputFile.getParentFile().exists()) {
if(!outputFile.getParentFile().mkdirs()) {
System.out.println(
"- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
}
}
System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
try {
downloadFileFromURL(url, outputFile);
System.out.println("Done");
System.exit(0);
} catch (Throwable e) {
System.out.println("- Error downloading");
e.printStackTrace();
System.exit(1);
}
}
private static void downloadFileFromURL(String urlString, File destination) throws Exception {
if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
String username = System.getenv("MVNW_USERNAME");
char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
Authenticator.setDefault(new Authenticator() {
@Override
protected PasswordAuthentication getPasswordAuthentication() {
return new PasswordAuthentication(username, password);
}
});
}
URL website = new URL(urlString);
ReadableByteChannel rbc;
rbc = Channels.newChannel(website.openStream());
FileOutputStream fos = new FileOutputStream(destination);
fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
fos.close();
rbc.close();
}
}

BIN
.mvn/wrapper/maven-wrapper.jar vendored Executable file → Normal file

Binary file not shown.

3
.mvn/wrapper/maven-wrapper.properties vendored Executable file → Normal file

@ -1 +1,2 @@
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.5.4/apache-maven-3.5.4-bin.zip
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.1/apache-maven-3.8.1-bin.zip
wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar

View File

@ -9,7 +9,7 @@ image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-elasticsearch%2
Since this pipeline is purely Docker-based, it's easy to:
* Debug what went wrong on your local machine.
* Test out a a tweak to your `test.sh` script before sending it out.
* Test out a a tweak to your `verify.sh` script before sending it out.
* Experiment against a new image before submitting your pull request.
All of these use cases are great reasons to essentially run what the CI server does on your local machine.

110
Jenkinsfile vendored

@ -3,7 +3,7 @@ pipeline {
triggers {
pollSCM 'H/10 * * * *'
upstream(upstreamProjects: "spring-data-commons/master", threshold: hudson.model.Result.SUCCESS)
upstream(upstreamProjects: "spring-data-commons/2.4.x", threshold: hudson.model.Result.SUCCESS)
}
options {
@ -15,71 +15,86 @@ pipeline {
stage("test: baseline (jdk8)") {
when {
anyOf {
branch 'master'
branch '4.1.x'
not { triggeredBy 'UpstreamCause' }
}
}
agent {
docker {
image 'adoptopenjdk/openjdk8:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
args '-u root -v /var/run/docker.sock:/var/run/docker.sock'
}
}
options { timeout(time: 30, unit: 'MINUTES') }
environment {
DOCKER_HUB = credentials('hub.docker.com-springbuildmaster')
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
}
steps {
sh 'mkdir -p /tmp/jenkins-home'
sh 'chown -R 1001:1001 .'
sh 'rm -rf ?'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list verify -Dsort -U -B'
sh 'chown -R 1001:1001 .'
script {
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
docker.image('adoptopenjdk/openjdk8:latest').inside('-u root -v /var/run/docker.sock:/var/run/docker.sock -v /usr/bin/docker:/usr/bin/docker -v $HOME:/tmp/jenkins-home') {
sh "docker login --username ${DOCKER_HUB_USR} --password ${DOCKER_HUB_PSW}"
sh 'PROFILE=none ci/verify.sh'
sh "ci/clean.sh"
}
}
}
}
}
stage("Test other configurations") {
when {
allOf {
branch 'master'
branch '4.1.x'
not { triggeredBy 'UpstreamCause' }
}
}
parallel {
stage("test: baseline (jdk11)") {
agent {
docker {
image 'adoptopenjdk/openjdk11:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
args '-u root -v /var/run/docker.sock:/var/run/docker.sock'
}
}
options { timeout(time: 30, unit: 'MINUTES') }
environment {
DOCKER_HUB = credentials('hub.docker.com-springbuildmaster')
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
}
steps {
sh 'mkdir -p /tmp/jenkins-home'
sh 'chown -R 1001:1001 .'
sh 'rm -rf ?'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pjava11 clean dependency:list verify -Dsort -U -B'
sh 'chown -R 1001:1001 .'
script {
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
docker.image('adoptopenjdk/openjdk11:latest').inside('-u root -v /var/run/docker.sock:/var/run/docker.sock -v /usr/bin/docker:/usr/bin/docker -v $HOME:/tmp/jenkins-home') {
sh "docker login --username ${DOCKER_HUB_USR} --password ${DOCKER_HUB_PSW}"
sh 'PROFILE=java11 ci/verify.sh'
sh "ci/clean.sh"
}
}
}
}
}
stage("test: baseline (jdk15)") {
agent {
docker {
image 'adoptopenjdk/openjdk15:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
args '-u root -v /var/run/docker.sock:/var/run/docker.sock'
}
}
options { timeout(time: 30, unit: 'MINUTES') }
environment {
DOCKER_HUB = credentials('hub.docker.com-springbuildmaster')
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
}
steps {
sh 'mkdir -p /tmp/jenkins-home'
sh 'chown -R 1001:1001 .'
sh 'rm -rf ?'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pjava11 clean dependency:list verify -Dsort -U -B'
sh 'chown -R 1001:1001 .'
script {
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
docker.image('adoptopenjdk/openjdk15:latest').inside('-u root -v /var/run/docker.sock:/var/run/docker.sock -v /usr/bin/docker:/usr/bin/docker -v $HOME:/tmp/jenkins-home') {
sh "docker login --username ${DOCKER_HUB_USR} --password ${DOCKER_HUB_PSW}"
sh 'PROFILE=java11 ci/verify.sh'
sh "ci/clean.sh"
}
}
}
}
}
}
@ -88,16 +103,12 @@ pipeline {
stage('Release to artifactory') {
when {
anyOf {
branch 'master'
branch '4.1.x'
not { triggeredBy 'UpstreamCause' }
}
}
agent {
docker {
image 'adoptopenjdk/openjdk8:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
}
options { timeout(time: 20, unit: 'MINUTES') }
@ -106,8 +117,10 @@ pipeline {
}
steps {
sh 'rm -rf ?'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,artifactory ' +
script {
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
docker.image('adoptopenjdk/openjdk8:latest').inside('-v $HOME:/tmp/jenkins-home') {
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,artifactory -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch-non-root ' +
'-Dartifactory.server=https://repo.spring.io ' +
"-Dartifactory.username=${ARTIFACTORY_USR} " +
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
@ -117,16 +130,15 @@ pipeline {
'-Dmaven.test.skip=true clean deploy -U -B'
}
}
}
}
}
stage('Publish documentation') {
when {
branch 'master'
branch '4.1.x'
}
agent {
docker {
image 'adoptopenjdk/openjdk8:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
}
options { timeout(time: 20, unit: 'MINUTES') }
@ -135,7 +147,10 @@ pipeline {
}
steps {
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,distribute ' +
script {
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
docker.image('adoptopenjdk/openjdk8:latest').inside('-v $HOME:/tmp/jenkins-home') {
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,distribute -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch-non-root ' +
'-Dartifactory.server=https://repo.spring.io ' +
"-Dartifactory.username=${ARTIFACTORY_USR} " +
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
@ -144,6 +159,9 @@ pipeline {
}
}
}
}
}
}
post {
changed {

202
LICENSE.txt Normal file

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
https://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

6
ci/clean.sh Executable file

@ -0,0 +1,6 @@
#!/bin/bash -x
set -euo pipefail
MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" \
./mvnw -s settings.xml clean -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch

10
ci/verify.sh Executable file

@ -0,0 +1,10 @@
#!/bin/bash -x
set -euo pipefail
mkdir -p /tmp/jenkins-home/.m2/spring-data-elasticsearch
chown -R 1001:1001 .
MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" \
./mvnw -s settings.xml \
-P${PROFILE} clean dependency:list verify -Dsort -U -B -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-elasticsearch

34
mvnw vendored

@ -8,7 +8,7 @@
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
@ -19,7 +19,7 @@
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
# Maven2 Start Up Batch script
# Maven Start Up Batch script
#
# Required ENV vars:
# ------------------
@ -114,7 +114,6 @@ if $mingw ; then
M2_HOME="`(cd "$M2_HOME"; pwd)`"
[ -n "$JAVA_HOME" ] &&
JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
# TODO classpath?
fi
if [ -z "$JAVA_HOME" ]; then
@ -212,7 +211,11 @@ else
if [ "$MVNW_VERBOSE" = true ]; then
echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..."
fi
jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"
if [ -n "$MVNW_REPOURL" ]; then
jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
else
jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
fi
while IFS="=" read key value; do
case "$key" in (wrapperUrl) jarUrl="$value"; break ;;
esac
@ -221,22 +224,38 @@ else
echo "Downloading from: $jarUrl"
fi
wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar"
if $cygwin; then
wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"`
fi
if command -v wget > /dev/null; then
if [ "$MVNW_VERBOSE" = true ]; then
echo "Found wget ... using wget"
fi
if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
wget "$jarUrl" -O "$wrapperJarPath"
else
wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath"
fi
elif command -v curl > /dev/null; then
if [ "$MVNW_VERBOSE" = true ]; then
echo "Found curl ... using curl"
fi
curl -o "$wrapperJarPath" "$jarUrl"
if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
curl -o "$wrapperJarPath" "$jarUrl" -f
else
curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f
fi
else
if [ "$MVNW_VERBOSE" = true ]; then
echo "Falling back to using Java to download"
fi
javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java"
# For Cygwin, switch paths to Windows format before running javac
if $cygwin; then
javaClass=`cygpath --path --windows "$javaClass"`
fi
if [ -e "$javaClass" ]; then
if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
if [ "$MVNW_VERBOSE" = true ]; then
@ -277,6 +296,11 @@ if $cygwin; then
MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
fi
# Provide a "standardized" way to retrieve the CLI args that will
# work with both Windows and non-Windows executions.
MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@"
export MAVEN_CMD_LINE_ARGS
WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
exec "$JAVACMD" \

35
mvnw.cmd vendored Executable file → Normal file

@ -7,7 +7,7 @@
@REM "License"); you may not use this file except in compliance
@REM with the License. You may obtain a copy of the License at
@REM
@REM https://www.apache.org/licenses/LICENSE-2.0
@REM http://www.apache.org/licenses/LICENSE-2.0
@REM
@REM Unless required by applicable law or agreed to in writing,
@REM software distributed under the License is distributed on an
@ -18,7 +18,7 @@
@REM ----------------------------------------------------------------------------
@REM ----------------------------------------------------------------------------
@REM Maven2 Start Up Batch script
@REM Maven Start Up Batch script
@REM
@REM Required ENV vars:
@REM JAVA_HOME - location of a JDK home dir
@ -26,7 +26,7 @@
@REM Optional ENV vars
@REM M2_HOME - location of maven2's installed home dir
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
@REM e.g. to debug Maven itself, use
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
@ -37,7 +37,7 @@
@echo off
@REM set title of command window
title %0
@REM enable echoing my setting MAVEN_BATCH_ECHO to 'on'
@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
@REM set %HOME% to equivalent of $HOME
@ -120,23 +120,44 @@ SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"
FOR /F "tokens=1,2 delims==" %%A IN (%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties) DO (
set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO (
IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
)
@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
if exist %WRAPPER_JAR% (
if "%MVNW_VERBOSE%" == "true" (
echo Found %WRAPPER_JAR%
)
) else (
if not "%MVNW_REPOURL%" == "" (
SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
)
if "%MVNW_VERBOSE%" == "true" (
echo Couldn't find %WRAPPER_JAR%, downloading it ...
echo Downloading from: %DOWNLOAD_URL%
powershell -Command "(New-Object Net.WebClient).DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"
)
powershell -Command "&{"^
"$webclient = new-object System.Net.WebClient;"^
"if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^
"$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^
"}"^
"[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^
"}"
if "%MVNW_VERBOSE%" == "true" (
echo Finished downloading %WRAPPER_JAR%
)
)
@REM End of extension
@REM Provide a "standardized" way to retrieve the CLI args that will
@REM work with both Windows and non-Windows executions.
set MAVEN_CMD_LINE_ARGS=%*
%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
if ERRORLEVEL 1 goto error
goto end

View File

@ -5,12 +5,12 @@
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-elasticsearch</artifactId>
<version>4.1.0</version>
<version>4.1.15</version>
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>2.4.0</version>
<version>2.4.15</version>
</parent>
<name>Spring Data Elasticsearch</name>
@ -22,8 +22,8 @@
<elasticsearch>7.9.3</elasticsearch>
<log4j>2.13.3</log4j>
<netty>4.1.52.Final</netty>
<springdata.commons>2.4.0</springdata.commons>
<testcontainers>1.14.3</testcontainers>
<springdata.commons>2.4.15</springdata.commons>
<testcontainers>1.15.1</testcontainers>
<java-module-name>spring.data.elasticsearch</java-module-name>
</properties>

29
settings.xml Normal file

@ -0,0 +1,29 @@
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
https://maven.apache.org/xsd/settings-1.0.0.xsd">
<servers>
<server>
<id>spring-plugins-release</id>
<username>${env.ARTIFACTORY_USR}</username>
<password>${env.ARTIFACTORY_PSW}</password>
</server>
<server>
<id>spring-libs-snapshot</id>
<username>${env.ARTIFACTORY_USR}</username>
<password>${env.ARTIFACTORY_PSW}</password>
</server>
<server>
<id>spring-libs-milestone</id>
<username>${env.ARTIFACTORY_USR}</username>
<password>${env.ARTIFACTORY_PSW}</password>
</server>
<server>
<id>spring-libs-release</id>
<username>${env.ARTIFACTORY_USR}</username>
<password>${env.ARTIFACTORY_PSW}</password>
</server>
</servers>
</settings>

View File

@ -34,7 +34,7 @@ The following table shows the Elasticsearch versions that are used by Spring Dat
[cols="^,^,^,^",options="header"]
|===
| Spring Data Release Train |Spring Data Elasticsearch |Elasticsearch | Spring Boot
| 2020.0.0footnote:cdv[Currently in development] |4.1.xfootnote:cdv[]|7.9.3 |2.3.xfootnote:cdv[]
| 2020.0.0footnote:cdv[Currently in development] |4.1.xfootnote:cdv[]|7.9.3 |2.4.xfootnote:cdv[]
| Neumann | 4.0.x | 7.6.2 |2.3.x
| Moore | 3.2.x |6.8.12 | 2.2.x
| Lovelace | 3.1.x | 6.2.2 |2.1.x

View File

@ -30,11 +30,15 @@ public class Person implements Persistable<Long> {
@Id private Long id;
private String lastName;
private String firstName;
@CreatedDate
@Field(type = FieldType.Date, format = DateFormat.basic_date_time)
private Instant createdDate;
@CreatedBy
private String createdBy
@Field(type = FieldType.Date, format = DateFormat.basic_date_time)
@LastModifiedDate
private Instant lastModifiedDate;
@LastModifiedBy
private String lastModifiedBy;
public Long getId() { // <.>

View File

@ -154,7 +154,7 @@ httpHeaders.add("some-header", "on every request") <1>
ClientConfiguration clientConfiguration = ClientConfiguration.builder()
.connectedTo("localhost:9200", "localhost:9291") <2>
.useSsl() <3>
.usingSsl() <3>
.withProxy("localhost:8888") <4>
.withPathPrefix("ela") <5>
.withConnectTimeout(Duration.ofSeconds(5)) <6>

View File

@ -58,13 +58,14 @@ Constructor arguments are mapped by name to the key values in the retrieved Docu
** `name`: The name of the field as it will be represented in the Elasticsearch document, if not set, the Java field name is used.
** `type`: the field type, can be one of _Text, Keyword, Long, Integer, Short, Byte, Double, Float, Half_Float, Scaled_Float, Date, Date_Nanos, Boolean, Binary, Integer_Range, Float_Range, Long_Range, Double_Range, Date_Range, Ip_Range, Object, Nested, Ip, TokenCount, Percolator, Flattened, Search_As_You_Type_.
See https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-types.html[Elasticsearch Mapping Types]
** `format` and `pattern` definitions for the _Date_ type. `format` must be defined for date types.
** `format` and `pattern` definitions for the _Date_ type.
** `store`: Flag whether the original field value should be store in Elasticsearch, default value is _false_.
** `analyzer`, `searchAnalyzer`, `normalizer` for specifying custom analyzers and normalizer.
* `@GeoPoint`: marks a field as _geo_point_ datatype.
Can be omitted if the field is an instance of the `GeoPoint` class.
NOTE: Properties that derive from `TemporalAccessor` must either have a `@Field` annotation of type `FieldType.Date` or a custom converter must be registered for this type. +
NOTE: Properties that derive from `TemporalAccessor` or are of type `java.util.Date` must either have a `@Field` annotation of type `FieldType.Date` and a
format different from `DateFormat.none` or a custom converter must be registered for this type. +
If you are using a custom date format, you need to use _uuuu_ for the year instead of _yyyy_.
This is due to a https://www.elastic.co/guide/en/elasticsearch/reference/current/migrate-to-java-time.html#java-time-migration-incompatible-date-formats[change in Elasticsearch 7].
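To make the note above concrete, a minimal sketch of a temporal property mapped as described; the entity and field names are made up, and the custom pattern uses uuuu for the year:

import java.time.LocalDate;

import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.DateFormat;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;

// Hypothetical entity: the LocalDate property carries a @Field annotation of type
// Date with a format other than DateFormat.none, as the note requires for types
// deriving from TemporalAccessor.
@Document(indexName = "person")
class Person {

    @Id private String id;

    @Field(type = FieldType.Date, format = DateFormat.custom, pattern = "uuuu-MM-dd")
    private LocalDate birthDate;
}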

View File

@ -20,5 +20,5 @@ package org.springframework.data.elasticsearch.annotations;
* @since 4.0
*/
public enum TermVector {
none, no, yes, with_positions, with_offsets, with_positions_offsets, with_positions_payloads, with_positions_offets_payloads
none, no, yes, with_positions, with_offsets, with_positions_offsets, with_positions_payloads, with_positions_offsets_payloads
}

View File

@ -54,6 +54,7 @@ import org.springframework.util.Assert;
* @author Huw Ayling-Miller
* @author Henrique Amaral
* @author Peter-Josef Meisch
* @author Nic Hines
* @since 3.2
*/
public final class RestClients {
@ -104,15 +105,14 @@ public final class RestClients {
Duration connectTimeout = clientConfiguration.getConnectTimeout();
if (!connectTimeout.isNegative()) {
requestConfigBuilder.setConnectTimeout(Math.toIntExact(connectTimeout.toMillis()));
requestConfigBuilder.setConnectionRequestTimeout(Math.toIntExact(connectTimeout.toMillis()));
}
Duration timeout = clientConfiguration.getSocketTimeout();
Duration socketTimeout = clientConfiguration.getSocketTimeout();
if (!timeout.isNegative()) {
requestConfigBuilder.setSocketTimeout(Math.toIntExact(timeout.toMillis()));
if (!socketTimeout.isNegative()) {
requestConfigBuilder.setSocketTimeout(Math.toIntExact(socketTimeout.toMillis()));
requestConfigBuilder.setConnectionRequestTimeout(Math.toIntExact(socketTimeout.toMillis()));
}
clientBuilder.setDefaultRequestConfig(requestConfigBuilder.build());

View File

@ -145,7 +145,7 @@ import org.springframework.web.reactive.function.client.WebClient.RequestBodySpe
*/
public class DefaultReactiveElasticsearchClient implements ReactiveElasticsearchClient, Indices {
private final HostProvider hostProvider;
private final HostProvider<?> hostProvider;
private final RequestCreator requestCreator;
private Supplier<HttpHeaders> headersSupplier = () -> HttpHeaders.EMPTY;
@ -155,7 +155,7 @@ public class DefaultReactiveElasticsearchClient implements ReactiveElasticsearch
*
* @param hostProvider must not be {@literal null}.
*/
public DefaultReactiveElasticsearchClient(HostProvider hostProvider) {
public DefaultReactiveElasticsearchClient(HostProvider<?> hostProvider) {
this(hostProvider, new DefaultRequestCreator());
}
@ -166,7 +166,7 @@ public class DefaultReactiveElasticsearchClient implements ReactiveElasticsearch
* @param hostProvider must not be {@literal null}.
* @param requestCreator must not be {@literal null}.
*/
public DefaultReactiveElasticsearchClient(HostProvider hostProvider, RequestCreator requestCreator) {
public DefaultReactiveElasticsearchClient(HostProvider<?> hostProvider, RequestCreator requestCreator) {
Assert.notNull(hostProvider, "HostProvider must not be null");
Assert.notNull(requestCreator, "RequestCreator must not be null");
@ -535,8 +535,7 @@ public class DefaultReactiveElasticsearchClient implements ReactiveElasticsearch
.flatMap(callback::doWithClient) //
.onErrorResume(throwable -> {
if (throwable instanceof ConnectException) {
if (isCausedByConnectionException(throwable)) {
return hostProvider.getActive(Verification.ACTIVE) //
.flatMap(callback::doWithClient);
}
@ -545,6 +544,27 @@ public class DefaultReactiveElasticsearchClient implements ReactiveElasticsearch
});
}
/**
* checks if the given throwable is a {@link ConnectException} or has one in it's cause chain
*
* @param throwable the throwable to check
* @return true if throwable is caused by a {@link ConnectException}
*/
private boolean isCausedByConnectionException(Throwable throwable) {
Throwable t = throwable;
do {
if (t instanceof ConnectException) {
return true;
}
t = t.getCause();
} while (t != null);
return false;
}
@Override
public Mono<Status> status() {
@ -823,6 +843,9 @@ public class DefaultReactiveElasticsearchClient implements ReactiveElasticsearch
String mediaType = response.headers().contentType().map(MediaType::toString).orElse(XContentType.JSON.mediaType());
return response.body(BodyExtractors.toMono(byte[].class)) //
.switchIfEmpty(Mono.error(
new ElasticsearchStatusException(String.format("%s request to %s returned error code %s and no body.",
request.getMethod(), request.getEndpoint(), statusCode), status)))
.map(bytes -> new String(bytes, StandardCharsets.UTF_8)) //
.flatMap(content -> contentOrError(content, mediaType, status))
.flatMap(unused -> Mono

View File

@ -1,5 +1,5 @@
/*
* Copyright 2018-2020 the original author or authors.
* Copyright 2018-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -27,6 +27,7 @@ import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.web.reactive.function.client.WebClient;
import org.springframework.web.reactive.function.client.WebClient.Builder;
import org.springframework.web.util.DefaultUriBuilderFactory;
/**
* Default {@link WebClientProvider} that uses cached {@link WebClient} instances per {@code hostAndPort}.
@ -156,7 +157,16 @@ class DefaultWebClientProvider implements WebClientProvider {
String baseUrl = String.format("%s://%s:%d%s", this.scheme, socketAddress.getHostString(), socketAddress.getPort(),
pathPrefix == null ? "" : '/' + pathPrefix);
WebClient webClient = builder.baseUrl(baseUrl).filter((request, next) -> next.exchange(request).doOnError(errorListener)).build();
DefaultUriBuilderFactory uriBuilderFactory = new DefaultUriBuilderFactory(baseUrl);
// the template will already be encoded by the RequestConverters methods
uriBuilderFactory.setEncodingMode(DefaultUriBuilderFactory.EncodingMode.VALUES_ONLY);
builder.uriBuilderFactory(uriBuilderFactory); //
WebClient webClient = builder //
.filter((request, next) -> next.exchange(request) //
.doOnError(errorListener)) //
.build(); //
return webClientConfigurer.apply(webClient);
}
}

View File

@ -1,5 +1,5 @@
/*
* Copyright 2018-2020 the original author or authors.
* Copyright 2018-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -34,9 +34,10 @@ import org.springframework.web.reactive.function.client.WebClient;
*
* @author Christoph Strobl
* @author Mark Paluch
* @author Peter-Josef Meisch
* @since 3.2
*/
public interface HostProvider {
public interface HostProvider<T extends HostProvider<T>> {
/**
* Create a new {@link HostProvider} best suited for the given {@link WebClientProvider} and number of hosts.
@ -46,7 +47,7 @@ public interface HostProvider {
* @param endpoints must not be {@literal null} nor empty.
* @return new instance of {@link HostProvider}.
*/
static HostProvider provider(WebClientProvider clientProvider, Supplier<HttpHeaders> headersSupplier,
static HostProvider<?> provider(WebClientProvider clientProvider, Supplier<HttpHeaders> headersSupplier,
InetSocketAddress... endpoints) {
Assert.notNull(clientProvider, "WebClientProvider must not be null");

View File

@ -1,5 +1,5 @@
/*
* Copyright 2018-2020 the original author or authors.
* Copyright 2018-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -20,6 +20,7 @@ import reactor.core.publisher.Mono;
import reactor.util.function.Tuple2;
import java.net.InetSocketAddress;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@ -29,6 +30,8 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Supplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.elasticsearch.client.ElasticsearchHost;
import org.springframework.data.elasticsearch.client.ElasticsearchHost.State;
import org.springframework.data.elasticsearch.client.NoReachableHostException;
@ -42,15 +45,19 @@ import org.springframework.web.reactive.function.client.WebClient;
*
* @author Christoph Strobl
* @author Mark Paluch
* @author Peter-Josef Meisch
* @since 3.2
*/
class MultiNodeHostProvider implements HostProvider {
class MultiNodeHostProvider implements HostProvider<MultiNodeHostProvider> {
private final static Logger LOG = LoggerFactory.getLogger(MultiNodeHostProvider.class);
private final WebClientProvider clientProvider;
private final Supplier<HttpHeaders> headersSupplier;
private final Map<InetSocketAddress, ElasticsearchHost> hosts;
MultiNodeHostProvider(WebClientProvider clientProvider, Supplier<HttpHeaders> headersSupplier, InetSocketAddress... endpoints) {
MultiNodeHostProvider(WebClientProvider clientProvider, Supplier<HttpHeaders> headersSupplier,
InetSocketAddress... endpoints) {
this.clientProvider = clientProvider;
this.headersSupplier = headersSupplier;
@ -58,6 +65,8 @@ class MultiNodeHostProvider implements HostProvider {
for (InetSocketAddress endpoint : endpoints) {
this.hosts.put(endpoint, new ElasticsearchHost(endpoint, State.UNKNOWN));
}
LOG.debug("initialized with " + hosts);
}
/*
@ -66,7 +75,7 @@ class MultiNodeHostProvider implements HostProvider {
*/
@Override
public Mono<ClusterInformation> clusterInfo() {
return nodes(null).map(this::updateNodeState).buffer(hosts.size())
return checkNodes(null).map(this::updateNodeState).buffer(hosts.size())
.then(Mono.just(new ClusterInformation(new LinkedHashSet<>(this.hosts.values()))));
}
@ -86,14 +95,19 @@ class MultiNodeHostProvider implements HostProvider {
@Override
public Mono<InetSocketAddress> lookupActiveHost(Verification verification) {
LOG.trace("lookupActiveHost " + verification + " from " + hosts());
if (Verification.LAZY.equals(verification)) {
for (ElasticsearchHost entry : hosts()) {
if (entry.isOnline()) {
LOG.trace("lookupActiveHost returning " + entry);
return Mono.just(entry.getEndpoint());
}
}
LOG.trace("no online host found with LAZY");
}
LOG.trace("searching for active host");
return findActiveHostInKnownActives() //
.switchIfEmpty(findActiveHostInUnresolved()) //
.switchIfEmpty(findActiveHostInDead()) //
@ -105,20 +119,30 @@ class MultiNodeHostProvider implements HostProvider {
}
private Mono<InetSocketAddress> findActiveHostInKnownActives() {
return findActiveForSate(State.ONLINE);
return findActiveForState(State.ONLINE);
}
private Mono<InetSocketAddress> findActiveHostInUnresolved() {
return findActiveForSate(State.UNKNOWN);
return findActiveForState(State.UNKNOWN);
}
private Mono<InetSocketAddress> findActiveHostInDead() {
return findActiveForSate(State.OFFLINE);
return findActiveForState(State.OFFLINE);
}
private Mono<InetSocketAddress> findActiveForSate(State state) {
return nodes(state).map(this::updateNodeState).filter(ElasticsearchHost::isOnline)
.map(ElasticsearchHost::getEndpoint).next();
private Mono<InetSocketAddress> findActiveForState(State state) {
LOG.trace("findActiveForState state " + state + ", current hosts: " + hosts);
return checkNodes(state) //
.map(this::updateNodeState) //
.filter(ElasticsearchHost::isOnline) //
.map(elasticsearchHost -> {
LOG.trace("findActiveForState returning host " + elasticsearchHost);
return elasticsearchHost;
}).map(ElasticsearchHost::getEndpoint) //
.takeLast(1) //
.next();
}
private ElasticsearchHost updateNodeState(Tuple2<InetSocketAddress, State> tuple2) {
@ -129,17 +153,23 @@ class MultiNodeHostProvider implements HostProvider {
return elasticsearchHost;
}
private Flux<Tuple2<InetSocketAddress, State>> nodes(@Nullable State state) {
private Flux<Tuple2<InetSocketAddress, State>> checkNodes(@Nullable State state) {
LOG.trace("checkNodes() with state " + state);
return Flux.fromIterable(hosts()) //
.filter(entry -> state == null || entry.getState().equals(state)) //
.map(ElasticsearchHost::getEndpoint) //
.flatMap(host -> {
.concatMap(host -> {
LOG.trace("checking host " + host);
Mono<ClientResponse> exchange = createWebClient(host) //
.head().uri("/") //
.headers(httpHeaders -> httpHeaders.addAll(headersSupplier.get())) //
.exchange().doOnError(throwable -> {
.exchange() //
.timeout(Duration.ofSeconds(1)) //
.doOnError(throwable -> {
hosts.put(host, new ElasticsearchHost(host, State.OFFLINE));
clientProvider.getErrorListener().accept(throwable);
});
@ -147,7 +177,10 @@ class MultiNodeHostProvider implements HostProvider {
return Mono.just(host).zipWith(exchange
.flatMap(it -> it.releaseBody().thenReturn(it.statusCode().isError() ? State.OFFLINE : State.ONLINE)));
}) //
.onErrorContinue((throwable, o) -> clientProvider.getErrorListener().accept(throwable));
.map(tuple -> {
LOG.trace("check result " + tuple);
return tuple;
}).onErrorContinue((throwable, o) -> clientProvider.getErrorListener().accept(throwable));
}
private List<ElasticsearchHost> hosts() {

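For context, a MultiNodeHostProvider is what ends up backing a reactive client configured with several endpoints; a sketch with placeholder host names:

import org.springframework.data.elasticsearch.client.ClientConfiguration;
import org.springframework.data.elasticsearch.client.reactive.ReactiveElasticsearchClient;
import org.springframework.data.elasticsearch.client.reactive.ReactiveRestClients;

class MultiNodeClientSetup {

    ReactiveElasticsearchClient reactiveClient() {
        ClientConfiguration clientConfiguration = ClientConfiguration.builder()
                .connectedTo("es-node-1:9200", "es-node-2:9200", "es-node-3:9200") // placeholder hosts
                .build();
        return ReactiveRestClients.create(clientConfiguration);
    }
}

With such a configuration the provider above checks each node with a HEAD request and keeps an ONLINE/OFFLINE/UNKNOWN state per endpoint.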
View File

@ -1,5 +1,5 @@
/*
* Copyright 2018-2020 the original author or authors.
* Copyright 2018-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -32,9 +32,10 @@ import org.springframework.web.reactive.function.client.WebClient;
*
* @author Christoph Strobl
* @author Mark Paluch
* @author Peter-Josef Meisch
* @since 3.2
*/
class SingleNodeHostProvider implements HostProvider {
class SingleNodeHostProvider implements HostProvider<SingleNodeHostProvider> {
private final WebClientProvider clientProvider;
private final Supplier<HttpHeaders> headersSupplier;

View File

@ -20,6 +20,12 @@ import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.elasticsearch.client.analytics.InferencePipelineAggregationBuilder;
import org.elasticsearch.client.analytics.ParsedInference;
import org.elasticsearch.client.analytics.ParsedStringStats;
import org.elasticsearch.client.analytics.ParsedTopMetrics;
import org.elasticsearch.client.analytics.StringStatsAggregationBuilder;
import org.elasticsearch.client.analytics.TopMetricsAggregationBuilder;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ContextParser;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@ -44,6 +50,8 @@ import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregati
import org.elasticsearch.search.aggregations.bucket.histogram.ParsedAutoDateHistogram;
import org.elasticsearch.search.aggregations.bucket.histogram.ParsedDateHistogram;
import org.elasticsearch.search.aggregations.bucket.histogram.ParsedHistogram;
import org.elasticsearch.search.aggregations.bucket.histogram.ParsedVariableWidthHistogram;
import org.elasticsearch.search.aggregations.bucket.histogram.VariableWidthHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.missing.ParsedMissing;
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder;
@ -64,7 +72,11 @@ import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms;
import org.elasticsearch.search.aggregations.bucket.terms.LongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedDoubleTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedSignificantLongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedSignificantStringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.SignificantLongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.SignificantStringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.metrics.*;
import org.elasticsearch.search.aggregations.pipeline.*;
@ -81,7 +93,7 @@ import org.springframework.data.elasticsearch.client.reactive.ReactiveElasticsea
* <p>
* Original implementation source {@link org.elasticsearch.client.RestHighLevelClient#getDefaultNamedXContents()} by
* {@literal Elasticsearch} (<a href="https://www.elastic.co">https://www.elastic.co</a>) licensed under the Apache
* License, Version 2.0.
* License, Version 2.0. The latest version used from Elasticsearch is 7.10.2.
* </p>
* Modified for usage with {@link ReactiveElasticsearchClient}.
* <p>
@ -126,6 +138,8 @@ public class NamedXContents {
map.put(HistogramAggregationBuilder.NAME, (p, c) -> ParsedHistogram.fromXContent(p, (String) c));
map.put(DateHistogramAggregationBuilder.NAME, (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c));
map.put(AutoDateHistogramAggregationBuilder.NAME, (p, c) -> ParsedAutoDateHistogram.fromXContent(p, (String) c));
map.put(VariableWidthHistogramAggregationBuilder.NAME,
(p, c) -> ParsedVariableWidthHistogram.fromXContent(p, (String) c));
map.put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c));
map.put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c));
map.put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c));
@ -142,10 +156,15 @@ public class NamedXContents {
map.put(GeoDistanceAggregationBuilder.NAME, (p, c) -> ParsedGeoDistance.fromXContent(p, (String) c));
map.put(FiltersAggregationBuilder.NAME, (p, c) -> ParsedFilters.fromXContent(p, (String) c));
map.put(AdjacencyMatrixAggregationBuilder.NAME, (p, c) -> ParsedAdjacencyMatrix.fromXContent(p, (String) c));
map.put(SignificantLongTerms.NAME, (p, c) -> ParsedSignificantLongTerms.fromXContent(p, (String) c));
map.put(SignificantStringTerms.NAME, (p, c) -> ParsedSignificantStringTerms.fromXContent(p, (String) c));
map.put(ScriptedMetricAggregationBuilder.NAME, (p, c) -> ParsedScriptedMetric.fromXContent(p, (String) c));
map.put(IpRangeAggregationBuilder.NAME, (p, c) -> ParsedBinaryRange.fromXContent(p, (String) c));
map.put(TopHitsAggregationBuilder.NAME, (p, c) -> ParsedTopHits.fromXContent(p, (String) c));
map.put(CompositeAggregationBuilder.NAME, (p, c) -> ParsedComposite.fromXContent(p, (String) c));
map.put(StringStatsAggregationBuilder.NAME, (p, c) -> ParsedStringStats.PARSER.parse(p, (String) c));
map.put(TopMetricsAggregationBuilder.NAME, (p, c) -> ParsedTopMetrics.PARSER.parse(p, (String) c));
map.put(InferencePipelineAggregationBuilder.NAME, (p, c) -> ParsedInference.fromXContent(p, (String) (c)));
List<NamedXContentRegistry.Entry> entries = map.entrySet().stream().map(
entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue()))
.collect(Collectors.toList());

View File

@ -1319,7 +1319,8 @@ public class RequestConverters {
// encode each part (e.g. index, type and id) separately before merging them into the path
// we prepend "/" to the path part to make this path absolute, otherwise there can be issues with
// paths that start with `-` or contain `:`
URI uri = new URI(null, null, null, -1, '/' + pathPart, null, null);
// the authority must be an empty string and not null, else paths that begin with slashes could have the leading slashes misread as an authority
URI uri = new URI((String) null, "", "/" + pathPart, (String) null, (String) null);
// manually encode any slash that each part may contain
return uri.getRawPath().substring(1).replaceAll("/", "%2F");
} catch (URISyntaxException e) {

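The effect described by the new comment can be shown in isolation; the path value below is only an example.

import java.net.URI;
import java.net.URISyntaxException;

class EncodePartSketch {
    public static void main(String[] args) throws URISyntaxException {
        String pathPart = "/sub/index"; // a part that itself starts with a slash
        // null authority: the string form "//sub/index" is re-parsed with "sub" as the authority
        URI nullAuthority = new URI((String) null, null, "/" + pathPart, null, null);
        // empty authority: the leading slashes stay part of the path
        URI emptyAuthority = new URI((String) null, "", "/" + pathPart, null, null);
        System.out.println(nullAuthority.getRawPath());  // "/index" - "sub" was swallowed as authority
        System.out.println(emptyAuthority.getRawPath()); // "//sub/index" - the leading slash is kept
    }
}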
View File

@ -31,20 +31,25 @@ public abstract class AbstractElasticsearchConfiguration extends ElasticsearchCo
/**
* Return the {@link RestHighLevelClient} instance used to connect to the cluster. <br />
* Annotate with {@link Bean} in case you want to expose a {@link RestHighLevelClient} instance to the
* {@link org.springframework.context.ApplicationContext}.
*
* @return never {@literal null}.
*/
@Bean
public abstract RestHighLevelClient elasticsearchClient();
/**
* Creates {@link ElasticsearchOperations}.
* Creates {@link ElasticsearchOperations}. <br/>
* NOTE: in version 4.1.2 the second parameter was added, previously this implementation called
* {@link #elasticsearchClient()} directly. This is not possible anymore, as the base configuration classes no longer
* use proxied bean methods.
*
* @param elasticsearchConverter the {@link ElasticsearchConverter} to use
* @param elasticsearchClient the {@link RestHighLevelClient} to use
* @return never {@literal null}.
*/
@Bean(name = { "elasticsearchOperations", "elasticsearchTemplate" })
public ElasticsearchOperations elasticsearchOperations(ElasticsearchConverter elasticsearchConverter) {
return new ElasticsearchRestTemplate(elasticsearchClient(), elasticsearchConverter);
public ElasticsearchOperations elasticsearchOperations(ElasticsearchConverter elasticsearchConverter,
RestHighLevelClient elasticsearchClient) {
return new ElasticsearchRestTemplate(elasticsearchClient, elasticsearchConverter);
}
}
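A sketch of a user configuration written against the new signature; the cluster address is a placeholder.

import org.elasticsearch.client.RestHighLevelClient;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.elasticsearch.client.ClientConfiguration;
import org.springframework.data.elasticsearch.client.RestClients;
import org.springframework.data.elasticsearch.config.AbstractElasticsearchConfiguration;

@Configuration
class MyElasticsearchConfig extends AbstractElasticsearchConfiguration {

    @Override
    public RestHighLevelClient elasticsearchClient() {
        // placeholder address
        return RestClients.create(ClientConfiguration.create("localhost:9200")).rest();
    }

    // elasticsearchOperations(converter, client) is inherited unchanged: the client is now handed in
    // as a bean method parameter instead of being obtained through a proxied elasticsearchClient() call
}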

View File

@ -18,7 +18,6 @@ package org.springframework.data.elasticsearch.config;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.elasticsearch.client.reactive.ReactiveElasticsearchClient;
import org.springframework.data.elasticsearch.core.ReactiveElasticsearchOperations;
import org.springframework.data.elasticsearch.core.ReactiveElasticsearchTemplate;
@ -31,16 +30,14 @@ import org.springframework.lang.Nullable;
* @since 3.2
* @see ElasticsearchConfigurationSupport
*/
@Configuration
public abstract class AbstractReactiveElasticsearchConfiguration extends ElasticsearchConfigurationSupport {
/**
* Return the {@link ReactiveElasticsearchClient} instance used to connect to the cluster. <br />
* Annotate with {@link Bean} in case you want to expose a {@link ReactiveElasticsearchClient} instance to the
* {@link org.springframework.context.ApplicationContext}.
*
* @return never {@literal null}.
*/
@Bean
public abstract ReactiveElasticsearchClient reactiveElasticsearchClient();
/**
@ -49,9 +46,10 @@ public abstract class AbstractReactiveElasticsearchConfiguration extends Elastic
* @return never {@literal null}.
*/
@Bean
public ReactiveElasticsearchOperations reactiveElasticsearchTemplate(ElasticsearchConverter elasticsearchConverter) {
public ReactiveElasticsearchOperations reactiveElasticsearchTemplate(ElasticsearchConverter elasticsearchConverter,
ReactiveElasticsearchClient reactiveElasticsearchClient) {
ReactiveElasticsearchTemplate template = new ReactiveElasticsearchTemplate(reactiveElasticsearchClient(),
ReactiveElasticsearchTemplate template = new ReactiveElasticsearchTemplate(reactiveElasticsearchClient,
elasticsearchConverter);
template.setIndicesOptions(indicesOptions());
template.setRefreshPolicy(refreshPolicy());

View File

@ -40,15 +40,16 @@ import org.springframework.util.StringUtils;
* @author Peter-Josef Meisch
* @since 3.2
*/
@Configuration
@Configuration(proxyBeanMethods = false)
public class ElasticsearchConfigurationSupport {
@Bean
public ElasticsearchConverter elasticsearchEntityMapper(
SimpleElasticsearchMappingContext elasticsearchMappingContext) {
SimpleElasticsearchMappingContext elasticsearchMappingContext, ElasticsearchCustomConversions elasticsearchCustomConversions) {
MappingElasticsearchConverter elasticsearchConverter = new MappingElasticsearchConverter(
elasticsearchMappingContext);
elasticsearchConverter.setConversions(elasticsearchCustomConversions());
elasticsearchConverter.setConversions(elasticsearchCustomConversions);
return elasticsearchConverter;
}
@ -60,11 +61,11 @@ public class ElasticsearchConfigurationSupport {
* @return never {@literal null}.
*/
@Bean
public SimpleElasticsearchMappingContext elasticsearchMappingContext() {
public SimpleElasticsearchMappingContext elasticsearchMappingContext(ElasticsearchCustomConversions elasticsearchCustomConversions) {
SimpleElasticsearchMappingContext mappingContext = new SimpleElasticsearchMappingContext();
mappingContext.setInitialEntitySet(getInitialEntitySet());
mappingContext.setSimpleTypeHolder(elasticsearchCustomConversions().getSimpleTypeHolder());
mappingContext.setSimpleTypeHolder(elasticsearchCustomConversions.getSimpleTypeHolder());
return mappingContext;
}

View File

@ -147,13 +147,14 @@ public abstract class AbstractElasticsearchTemplate implements ElasticsearchOper
Assert.notNull(entity, "entity must not be null");
Assert.notNull(index, "index must not be null");
IndexQuery query = getIndexQuery(entity);
index(query, index);
T entityAfterBeforeConvert = maybeCallbackBeforeConvert(entity, index);
// suppressing because it's either entity itself or something of a correct type returned by an entity callback
@SuppressWarnings("unchecked")
T castResult = (T) query.getObject();
return castResult;
IndexQuery query = getIndexQuery(entityAfterBeforeConvert);
doIndex(query, index);
T entityAfterAfterSave = maybeCallbackAfterSave(entityAfterBeforeConvert, index);
return entityAfterAfterSave;
}
@Override
@ -192,6 +193,20 @@ public abstract class AbstractElasticsearchTemplate implements ElasticsearchOper
return save(Arrays.asList(entities));
}
@Override
public String index(IndexQuery query, IndexCoordinates index) {
maybeCallbackBeforeConvertWithQuery(query, index);
String documentId = doIndex(query, index);
maybeCallbackAfterSaveWithQuery(query, index);
return documentId;
}
public abstract String doIndex(IndexQuery query, IndexCoordinates indexCoordinates);
@Override
@Nullable
public <T> T get(String id, Class<T> clazz) {
@ -261,11 +276,38 @@ public abstract class AbstractElasticsearchTemplate implements ElasticsearchOper
return bulkIndex(queries, bulkOptions, getIndexCoordinatesFor(clazz));
}
@Override
public final List<IndexedObjectInformation> bulkIndex(List<IndexQuery> queries, BulkOptions bulkOptions,
IndexCoordinates index) {
Assert.notNull(queries, "List of IndexQuery must not be null");
Assert.notNull(bulkOptions, "BulkOptions must not be null");
return bulkOperation(queries, bulkOptions, index);
}
@Override
public void bulkUpdate(List<UpdateQuery> queries, Class<?> clazz) {
bulkUpdate(queries, getIndexCoordinatesFor(clazz));
}
public List<IndexedObjectInformation> bulkOperation(List<?> queries, BulkOptions bulkOptions,
IndexCoordinates index) {
Assert.notNull(queries, "List of IndexQuery must not be null");
Assert.notNull(bulkOptions, "BulkOptions must not be null");
maybeCallbackBeforeConvertWithQueries(queries, index);
List<IndexedObjectInformation> indexedObjectInformations = doBulkOperation(queries, bulkOptions, index);
maybeCallbackAfterSaveWithQueries(queries, index);
return indexedObjectInformations;
}
public abstract List<IndexedObjectInformation> doBulkOperation(List<?> queries, BulkOptions bulkOptions,
IndexCoordinates index);
// endregion
// region SearchOperations
@ -310,7 +352,7 @@ public abstract class AbstractElasticsearchTemplate implements ElasticsearchOper
Assert.notNull(query.getId(), "No document id defined for MoreLikeThisQuery");
MoreLikeThisQueryBuilder moreLikeThisQueryBuilder = requestFactory.moreLikeThisQueryBuilder(query, index);
return search(new NativeSearchQueryBuilder().withQuery(moreLikeThisQueryBuilder).build(), clazz, index);
return search(new NativeSearchQueryBuilder().withQuery(moreLikeThisQueryBuilder).withPageable(query.getPageable()).build(), clazz, index);
}
@Override
@ -620,6 +662,20 @@ public abstract class AbstractElasticsearchTemplate implements ElasticsearchOper
if (queryObject != null) {
queryObject = maybeCallbackBeforeConvert(queryObject, index);
indexQuery.setObject(queryObject);
// the callback might have set some values relevant for the IndexQuery
IndexQuery newQuery = getIndexQuery(queryObject);
if (indexQuery.getRouting() == null && newQuery.getRouting() != null) {
indexQuery.setRouting(newQuery.getRouting());
}
if (indexQuery.getSeqNo() == null && newQuery.getSeqNo() != null) {
indexQuery.setSeqNo(newQuery.getSeqNo());
}
if (indexQuery.getPrimaryTerm() == null && newQuery.getPrimaryTerm() != null) {
indexQuery.setPrimaryTerm(newQuery.getPrimaryTerm());
}
}
}
}
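For illustration, an entity callback that the reworked save flow invokes before the IndexQuery is built; the Person entity and its field are assumptions.

import java.time.Instant;

import org.springframework.data.elasticsearch.core.event.BeforeConvertCallback;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.stereotype.Component;

// minimal assumed entity
class Person {
    Instant lastModified;
}

@Component
class PersonBeforeConvertCallback implements BeforeConvertCallback<Person> {

    @Override
    public Person onBeforeConvert(Person person, IndexCoordinates index) {
        // runs before the IndexQuery is built, so the change ends up in the indexed document
        person.lastModified = Instant.now();
        return person;
    }
}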

View File

@ -16,6 +16,7 @@
package org.springframework.data.elasticsearch.core;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
@ -34,11 +35,13 @@ import org.elasticsearch.client.indices.GetIndexRequest;
import org.elasticsearch.client.indices.GetIndexTemplatesRequest;
import org.elasticsearch.client.indices.GetIndexTemplatesResponse;
import org.elasticsearch.client.indices.GetMappingsRequest;
import org.elasticsearch.client.indices.GetMappingsResponse;
import org.elasticsearch.client.indices.IndexTemplatesExistRequest;
import org.elasticsearch.client.indices.PutIndexTemplateRequest;
import org.elasticsearch.client.indices.PutMappingRequest;
import org.elasticsearch.cluster.metadata.AliasMetadata;
import org.elasticsearch.cluster.metadata.MappingMetadata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.elasticsearch.core.document.Document;
import org.springframework.data.elasticsearch.core.index.AliasActions;
import org.springframework.data.elasticsearch.core.index.AliasData;
@ -61,6 +64,8 @@ import org.springframework.util.Assert;
*/
class DefaultIndexOperations extends AbstractDefaultIndexOperations implements IndexOperations {
private static final Logger LOGGER = LoggerFactory.getLogger(DefaultIndexOperations.class);
private final ElasticsearchRestTemplate restTemplate;
public DefaultIndexOperations(ElasticsearchRestTemplate restTemplate, Class<?> boundClass) {
@ -117,10 +122,19 @@ class DefaultIndexOperations extends AbstractDefaultIndexOperations implements I
GetMappingsRequest mappingsRequest = requestFactory.getMappingsRequest(index);
return restTemplate.execute(client -> {
GetMappingsResponse mapping = client.indices().getMapping(mappingsRequest, RequestOptions.DEFAULT);
// we only return data for the first index name that was requested (always have done so)
String index1 = mappingsRequest.indices()[0];
return mapping.mappings().get(index1).getSourceAsMap();
Map<String, MappingMetadata> mappings = client.indices() //
.getMapping(mappingsRequest, RequestOptions.DEFAULT) //
.mappings(); //
if (mappings == null || mappings.size() == 0) {
return Collections.emptyMap();
}
if (mappings.size() > 1) {
LOGGER.warn("more than one mapping returned for " + index.getIndexName());
}
// we have at least one, take the first from the iterator
return mappings.entrySet().iterator().next().getValue().getSourceAsMap();
});
}

View File

@ -15,6 +15,7 @@
*/
package org.springframework.data.elasticsearch.core;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
@ -40,6 +41,7 @@ import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateReque
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.AliasMetadata;
import org.elasticsearch.cluster.metadata.IndexTemplateMetadata;
import org.elasticsearch.cluster.metadata.MappingMetadata;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
@ -126,10 +128,19 @@ class DefaultTransportIndexOperations extends AbstractDefaultIndexOperations imp
GetMappingsRequest mappingsRequest = requestFactory.getMappingsRequest(client, index);
return client.admin().indices().getMappings( //
ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetadata>> mappings = client.admin().indices().getMappings( //
mappingsRequest).actionGet() //
.getMappings().get(mappingsRequest.indices()[0]).get(IndexCoordinates.TYPE) //
.getSourceAsMap();
.getMappings();
if (mappings == null || mappings.size() == 0) {
return Collections.emptyMap();
}
if (mappings.size() > 1) {
LOGGER.warn("more than one mapping returned for " + index.getIndexName());
}
// we have at least one, take the first from the iterator
return mappings.iterator().next().value.get(IndexCoordinates.TYPE).getSourceAsMap();
}
@Override

View File

@ -137,10 +137,7 @@ public class ElasticsearchRestTemplate extends AbstractElasticsearchTemplate {
// endregion
// region DocumentOperations
@Override
public String index(IndexQuery query, IndexCoordinates index) {
maybeCallbackBeforeConvertWithQuery(query, index);
public String doIndex(IndexQuery query, IndexCoordinates index) {
IndexRequest request = requestFactory.indexRequest(query, index);
IndexResponse indexResponse = execute(client -> client.index(request, RequestOptions.DEFAULT));
@ -152,8 +149,6 @@ public class ElasticsearchRestTemplate extends AbstractElasticsearchTemplate {
indexResponse.getPrimaryTerm(), indexResponse.getVersion()));
}
maybeCallbackAfterSaveWithQuery(query, index);
return indexResponse.getId();
}
@ -187,16 +182,6 @@ public class ElasticsearchRestTemplate extends AbstractElasticsearchTemplate {
return execute(client -> client.get(request, RequestOptions.DEFAULT).isExists());
}
@Override
public List<IndexedObjectInformation> bulkIndex(List<IndexQuery> queries, BulkOptions bulkOptions,
IndexCoordinates index) {
Assert.notNull(queries, "List of IndexQuery must not be null");
Assert.notNull(bulkOptions, "BulkOptions must not be null");
return doBulkOperation(queries, bulkOptions, index);
}
@Override
public void bulkUpdate(List<UpdateQuery> queries, BulkOptions bulkOptions, IndexCoordinates index) {
@ -237,14 +222,12 @@ public class ElasticsearchRestTemplate extends AbstractElasticsearchTemplate {
return new UpdateResponse(result);
}
private List<IndexedObjectInformation> doBulkOperation(List<?> queries, BulkOptions bulkOptions,
public List<IndexedObjectInformation> doBulkOperation(List<?> queries, BulkOptions bulkOptions,
IndexCoordinates index) {
maybeCallbackBeforeConvertWithQueries(queries, index);
BulkRequest bulkRequest = requestFactory.bulkRequest(queries, bulkOptions, index);
List<IndexedObjectInformation> indexedObjectInformationList = checkForBulkOperationFailure(
execute(client -> client.bulk(bulkRequest, RequestOptions.DEFAULT)));
updateIndexedObjectsWithQueries(queries, indexedObjectInformationList);
maybeCallbackAfterSaveWithQueries(queries, index);
return indexedObjectInformationList;
}
// endregion

View File

@ -144,10 +144,7 @@ public class ElasticsearchTemplate extends AbstractElasticsearchTemplate {
// endregion
// region DocumentOperations
@Override
public String index(IndexQuery query, IndexCoordinates index) {
maybeCallbackBeforeConvertWithQuery(query, index);
public String doIndex(IndexQuery query, IndexCoordinates index) {
IndexRequestBuilder indexRequestBuilder = requestFactory.indexRequestBuilder(client, query, index);
ActionFuture<IndexResponse> future = indexRequestBuilder.execute();
@ -166,8 +163,6 @@ public class ElasticsearchTemplate extends AbstractElasticsearchTemplate {
response.getPrimaryTerm(), response.getVersion()));
}
maybeCallbackAfterSaveWithQuery(query, index);
return documentId;
}
@ -201,22 +196,6 @@ public class ElasticsearchTemplate extends AbstractElasticsearchTemplate {
return getRequestBuilder.execute().actionGet().isExists();
}
@Override
public List<IndexedObjectInformation> bulkIndex(List<IndexQuery> queries, BulkOptions bulkOptions,
IndexCoordinates index) {
Assert.notNull(queries, "List of IndexQuery must not be null");
Assert.notNull(bulkOptions, "BulkOptions must not be null");
List<IndexedObjectInformation> indexedObjectInformations = doBulkOperation(queries, bulkOptions, index);
updateIndexedObjectsWithQueries(queries, indexedObjectInformations);
maybeCallbackAfterSaveWithQueries(queries, index);
return indexedObjectInformations;
}
@Override
public void bulkUpdate(List<UpdateQuery> queries, BulkOptions bulkOptions, IndexCoordinates index) {
@ -261,11 +240,13 @@ public class ElasticsearchTemplate extends AbstractElasticsearchTemplate {
return new UpdateResponse(result);
}
private List<IndexedObjectInformation> doBulkOperation(List<?> queries, BulkOptions bulkOptions,
public List<IndexedObjectInformation> doBulkOperation(List<?> queries, BulkOptions bulkOptions,
IndexCoordinates index) {
maybeCallbackBeforeConvertWithQueries(queries, index);
BulkRequestBuilder bulkRequest = requestFactory.bulkRequestBuilder(client, queries, bulkOptions, index);
return checkForBulkOperationFailure(bulkRequest.execute().actionGet());
final List<IndexedObjectInformation> indexedObjectInformations = checkForBulkOperationFailure(
bulkRequest.execute().actionGet());
updateIndexedObjectsWithQueries(queries, indexedObjectInformations);
return indexedObjectInformations;
}
// endregion

View File

@ -1,5 +1,5 @@
/*
* Copyright 2019-2020 the original author or authors.
* Copyright 2019-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -54,6 +54,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateRequestBuilder;
import org.elasticsearch.client.Client;
@ -85,6 +86,7 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.GeoDistanceSortBuilder;
@ -111,6 +113,7 @@ import org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersiste
import org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersistentProperty;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.*;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
@ -860,9 +863,7 @@ class RequestFactory {
elasticsearchConverter.updateQuery(searchQuery, clazz);
List<MultiGetRequest.Item> items = new ArrayList<>();
if (!isEmpty(searchQuery.getFields())) {
searchQuery.addSourceFilter(new FetchSourceFilter(toArray(searchQuery.getFields()), null));
}
FetchSourceContext fetchSourceContext = getFetchSourceContext(searchQuery);
if (!isEmpty(searchQuery.getIds())) {
String indexName = index.getIndexName();
@ -872,6 +873,11 @@ class RequestFactory {
if (searchQuery.getRoute() != null) {
item = item.routing(searchQuery.getRoute());
}
if (fetchSourceContext != null) {
item.fetchSourceContext(fetchSourceContext);
}
items.add(item);
}
}
@ -886,15 +892,17 @@ class RequestFactory {
String indexName = index.getIndexName();
IndexRequest indexRequest;
if (query.getObject() != null) {
String id = StringUtils.isEmpty(query.getId()) ? getPersistentEntityId(query.getObject()) : query.getId();
Object queryObject = query.getObject();
if (queryObject != null) {
String id = StringUtils.isEmpty(query.getId()) ? getPersistentEntityId(queryObject) : query.getId();
// If we have a query id and a document id, do not ask ES to generate one.
if (id != null) {
indexRequest = new IndexRequest(indexName).id(id);
} else {
indexRequest = new IndexRequest(indexName);
}
indexRequest.source(elasticsearchConverter.mapObject(query.getObject()).toJson(), Requests.INDEX_CONTENT_TYPE);
indexRequest.source(elasticsearchConverter.mapObject(queryObject).toJson(), Requests.INDEX_CONTENT_TYPE);
} else if (query.getSource() != null) {
indexRequest = new IndexRequest(indexName).id(query.getId()).source(query.getSource(),
Requests.INDEX_CONTENT_TYPE);
@ -905,7 +913,8 @@ class RequestFactory {
if (query.getVersion() != null) {
indexRequest.version(query.getVersion());
VersionType versionType = retrieveVersionTypeFromPersistentEntity(query.getObject().getClass());
VersionType versionType = retrieveVersionTypeFromPersistentEntity(
queryObject != null ? queryObject.getClass() : null);
indexRequest.versionType(versionType);
}
@ -930,15 +939,16 @@ class RequestFactory {
IndexRequestBuilder indexRequestBuilder;
if (query.getObject() != null) {
String id = StringUtils.isEmpty(query.getId()) ? getPersistentEntityId(query.getObject()) : query.getId();
Object queryObject = query.getObject();
if (queryObject != null) {
String id = StringUtils.isEmpty(query.getId()) ? getPersistentEntityId(queryObject) : query.getId();
// If we have a query id and a document id, do not ask ES to generate one.
if (id != null) {
indexRequestBuilder = client.prepareIndex(indexName, type, id);
} else {
indexRequestBuilder = client.prepareIndex(indexName, type);
}
indexRequestBuilder.setSource(elasticsearchConverter.mapObject(query.getObject()).toJson(),
indexRequestBuilder.setSource(elasticsearchConverter.mapObject(queryObject).toJson(),
Requests.INDEX_CONTENT_TYPE);
} else if (query.getSource() != null) {
indexRequestBuilder = client.prepareIndex(indexName, type, query.getId()).setSource(query.getSource(),
@ -949,7 +959,8 @@ class RequestFactory {
}
if (query.getVersion() != null) {
indexRequestBuilder.setVersion(query.getVersion());
VersionType versionType = retrieveVersionTypeFromPersistentEntity(query.getObject().getClass());
VersionType versionType = retrieveVersionTypeFromPersistentEntity(
queryObject != null ? queryObject.getClass() : null);
indexRequestBuilder.setVersionType(versionType);
}
@ -1162,6 +1173,10 @@ class RequestFactory {
request.routing(query.getRoute());
}
if (query.getScrollTime() != null) {
request.scroll(TimeValue.timeValueMillis(query.getScrollTime().toMillis()));
}
request.source(sourceBuilder);
return request;
}
@ -1237,6 +1252,10 @@ class RequestFactory {
searchRequestBuilder.setRouting(query.getRoute());
}
if (query.getScrollTime() != null) {
searchRequestBuilder.setScroll(TimeValue.timeValueMillis(query.getScrollTime().toMillis()));
}
return searchRequestBuilder;
}
@ -1553,6 +1572,85 @@ class RequestFactory {
return elasticsearchFilter;
}
@Nullable
private ElasticsearchPersistentEntity<?> getPersistentEntity(@Nullable Class<?> clazz) {
return clazz != null ? elasticsearchConverter.getMappingContext().getPersistentEntity(clazz) : null;
}
@Nullable
private String getPersistentEntityId(Object entity) {
Object identifier = elasticsearchConverter.getMappingContext() //
.getRequiredPersistentEntity(entity.getClass()) //
.getIdentifierAccessor(entity).getIdentifier();
if (identifier != null) {
return identifier.toString();
}
return null;
}
private VersionType retrieveVersionTypeFromPersistentEntity(@Nullable Class<?> clazz) {
MappingContext<? extends ElasticsearchPersistentEntity<?>, ElasticsearchPersistentProperty> mappingContext = elasticsearchConverter
.getMappingContext();
ElasticsearchPersistentEntity<?> persistentEntity = clazz != null ? mappingContext.getPersistentEntity(clazz)
: null;
VersionType versionType = null;
if (persistentEntity != null) {
versionType = persistentEntity.getVersionType();
}
return versionType != null ? versionType : VersionType.EXTERNAL;
}
private String[] toArray(List<String> values) {
String[] valuesAsArray = new String[values.size()];
return values.toArray(valuesAsArray);
}
// endregion
private boolean hasSeqNoPrimaryTermProperty(@Nullable Class<?> entityClass) {
if (entityClass == null) {
return false;
}
if (!elasticsearchConverter.getMappingContext().hasPersistentEntityFor(entityClass)) {
return false;
}
ElasticsearchPersistentEntity<?> entity = elasticsearchConverter.getMappingContext()
.getRequiredPersistentEntity(entityClass);
return entity.hasSeqNoPrimaryTermProperty();
}
private FetchSourceContext getFetchSourceContext(Query searchQuery) {
FetchSourceContext fetchSourceContext = null;
SourceFilter sourceFilter = searchQuery.getSourceFilter();
if (!isEmpty(searchQuery.getFields())) {
if (sourceFilter == null) {
sourceFilter = new FetchSourceFilter(toArray(searchQuery.getFields()), null);
} else {
ArrayList<String> arrayList = new ArrayList<>();
Collections.addAll(arrayList, sourceFilter.getIncludes());
sourceFilter = new FetchSourceFilter(toArray(arrayList), null);
}
fetchSourceContext = new FetchSourceContext(true, sourceFilter.getIncludes(), sourceFilter.getExcludes());
} else if (sourceFilter != null) {
fetchSourceContext = new FetchSourceContext(true, sourceFilter.getIncludes(), sourceFilter.getExcludes());
}
return fetchSourceContext;
}
// endregion
// region response stuff
/**
@ -1582,57 +1680,6 @@ class RequestFactory {
return settings;
}
// endregion
// region helper functions
@Nullable
private ElasticsearchPersistentEntity<?> getPersistentEntity(@Nullable Class<?> clazz) {
return clazz != null ? elasticsearchConverter.getMappingContext().getPersistentEntity(clazz) : null;
}
@Nullable
private String getPersistentEntityId(Object entity) {
Object identifier = elasticsearchConverter.getMappingContext() //
.getRequiredPersistentEntity(entity.getClass()) //
.getIdentifierAccessor(entity).getIdentifier();
if (identifier != null) {
return identifier.toString();
}
return null;
}
private VersionType retrieveVersionTypeFromPersistentEntity(Class<?> clazz) {
VersionType versionType = elasticsearchConverter.getMappingContext().getRequiredPersistentEntity(clazz)
.getVersionType();
return versionType != null ? versionType : VersionType.EXTERNAL;
}
private String[] toArray(List<String> values) {
String[] valuesAsArray = new String[values.size()];
return values.toArray(valuesAsArray);
}
// endregion
private boolean hasSeqNoPrimaryTermProperty(@Nullable Class<?> entityClass) {
if (entityClass == null) {
return false;
}
if (!elasticsearchConverter.getMappingContext().hasPersistentEntityFor(entityClass)) {
return false;
}
ElasticsearchPersistentEntity<?> entity = elasticsearchConverter.getMappingContext()
.getRequiredPersistentEntity(entityClass);
return entity.hasSeqNoPrimaryTermProperty();
}
// endregion
}
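A sketch of a query whose source filter ends up in the FetchSourceContext built above; the field names are placeholders.

import org.elasticsearch.index.query.QueryBuilders;
import org.springframework.data.elasticsearch.core.query.FetchSourceFilter;
import org.springframework.data.elasticsearch.core.query.NativeSearchQuery;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;

class SourceFilterSketch {

    NativeSearchQuery query() {
        return new NativeSearchQueryBuilder()
                .withQuery(QueryBuilders.matchAllQuery())
                // only these (placeholder) fields are returned in _source, now also for multiGet items
                .withSourceFilter(new FetchSourceFilter(new String[] { "firstName", "lastName" }, null))
                .build();
    }
}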

View File

@ -86,6 +86,12 @@ public final class SearchHitSupport {
return unwrapSearchHitsIterator((SearchHitsIterator<?>) result);
}
if (result instanceof SearchPage<?>) {
SearchPage<?> searchPage = (SearchPage<?>) result;
List<?> content = (List<?>) SearchHitSupport.unwrapSearchHits(searchPage.getSearchHits());
return new PageImpl<>(content, searchPage.getPageable(), searchPage.getTotalElements());
}
if (ReactiveWrappers.isAvailable(ReactiveWrappers.ReactiveLibrary.PROJECT_REACTOR)) {
if (result instanceof Flux) {

View File

@ -172,19 +172,19 @@ public class GeoConverters {
String type = GeoConverters.getGeoJsonType(source);
switch (type) {
case GeoJsonPoint.TYPE:
case "point":
return MapToGeoJsonPointConverter.INSTANCE.convert(source);
case GeoJsonMultiPoint.TYPE:
case "multipoint":
return MapToGeoJsonMultiPointConverter.INSTANCE.convert(source);
case GeoJsonLineString.TYPE:
case "linestring":
return MapToGeoJsonLineStringConverter.INSTANCE.convert(source);
case GeoJsonMultiLineString.TYPE:
case "multilinestring":
return MapToGeoJsonMultiLineStringConverter.INSTANCE.convert(source);
case GeoJsonPolygon.TYPE:
case "polygon":
return MapToGeoJsonPolygonConverter.INSTANCE.convert(source);
case GeoJsonMultiPolygon.TYPE:
case "multipolygon":
return MapToGeoJsonMultiPolygonConverter.INSTANCE.convert(source);
case GeoJsonGeometryCollection.TYPE:
case "geometrycollection":
return MapToGeoJsonGeometryCollectionConverter.INSTANCE.convert(source);
default:
throw new IllegalArgumentException("unknown GeoJson type " + type);
@ -217,7 +217,7 @@ public class GeoConverters {
public GeoJsonPoint convert(Map<String, Object> source) {
String type = GeoConverters.getGeoJsonType(source);
Assert.isTrue(type.equals(GeoJsonPoint.TYPE), "does not contain a type 'Point'");
Assert.isTrue(type.equalsIgnoreCase(GeoJsonPoint.TYPE), "does not contain a type 'Point'");
Object coordinates = source.get("coordinates");
Assert.notNull(coordinates, "Document to convert does not contain coordinates");
@ -255,7 +255,7 @@ public class GeoConverters {
public GeoJsonMultiPoint convert(Map<String, Object> source) {
String type = GeoConverters.getGeoJsonType(source);
Assert.isTrue(type.equals(GeoJsonMultiPoint.TYPE), "does not contain a type 'MultiPoint'");
Assert.isTrue(type.equalsIgnoreCase(GeoJsonMultiPoint.TYPE), "does not contain a type 'MultiPoint'");
Object coordinates = source.get("coordinates");
Assert.notNull(coordinates, "Document to convert does not contain coordinates");
Assert.isTrue(coordinates instanceof List, "coordinates must be a List");
@ -290,7 +290,7 @@ public class GeoConverters {
public GeoJsonLineString convert(Map<String, Object> source) {
String type = GeoConverters.getGeoJsonType(source);
Assert.isTrue(type.equals(GeoJsonLineString.TYPE), "does not contain a type 'LineString'");
Assert.isTrue(type.equalsIgnoreCase(GeoJsonLineString.TYPE), "does not contain a type 'LineString'");
Object coordinates = source.get("coordinates");
Assert.notNull(coordinates, "Document to convert does not contain coordinates");
Assert.isTrue(coordinates instanceof List, "coordinates must be a List");
@ -322,7 +322,7 @@ public class GeoConverters {
public GeoJsonMultiLineString convert(Map<String, Object> source) {
String type = GeoConverters.getGeoJsonType(source);
Assert.isTrue(type.equals(GeoJsonMultiLineString.TYPE), "does not contain a type 'MultiLineString'");
Assert.isTrue(type.equalsIgnoreCase(GeoJsonMultiLineString.TYPE), "does not contain a type 'MultiLineString'");
List<GeoJsonLineString> lines = geoJsonLineStringsFromMap(source);
return GeoJsonMultiLineString.of(lines);
}
@ -350,7 +350,7 @@ public class GeoConverters {
public GeoJsonPolygon convert(Map<String, Object> source) {
String type = GeoConverters.getGeoJsonType(source);
Assert.isTrue(type.equals(GeoJsonPolygon.TYPE), "does not contain a type 'Polygon'");
Assert.isTrue(type.equalsIgnoreCase(GeoJsonPolygon.TYPE), "does not contain a type 'Polygon'");
List<GeoJsonLineString> lines = geoJsonLineStringsFromMap(source);
Assert.isTrue(lines.size() > 0, "no linestrings defined in polygon");
GeoJsonPolygon geoJsonPolygon = GeoJsonPolygon.of(lines.get(0));
@ -394,7 +394,7 @@ public class GeoConverters {
public GeoJsonMultiPolygon convert(Map<String, Object> source) {
String type = GeoConverters.getGeoJsonType(source);
Assert.isTrue(type.equals(GeoJsonMultiPolygon.TYPE), "does not contain a type 'MultiPolygon'");
Assert.isTrue(type.equalsIgnoreCase(GeoJsonMultiPolygon.TYPE), "does not contain a type 'MultiPolygon'");
Object coordinates = source.get("coordinates");
Assert.notNull(coordinates, "Document to convert does not contain coordinates");
Assert.isTrue(coordinates instanceof List, "coordinates must be a List");
@ -441,7 +441,8 @@ public class GeoConverters {
public GeoJsonGeometryCollection convert(Map<String, Object> source) {
String type = GeoConverters.getGeoJsonType(source);
Assert.isTrue(type.equals(GeoJsonGeometryCollection.TYPE), "does not contain a type 'GeometryCollection'");
Assert.isTrue(type.equalsIgnoreCase(GeoJsonGeometryCollection.TYPE),
"does not contain a type 'GeometryCollection'");
Object geometries = source.get("geometries");
Assert.notNull(geometries, "Document to convert does not contain geometries");
Assert.isTrue(geometries instanceof List, "geometries must be a List");
@ -461,7 +462,7 @@ public class GeoConverters {
Assert.notNull(type, "Document to convert does not contain a type");
Assert.isTrue(type instanceof String, "type must be a String");
return type.toString();
return type.toString().toLowerCase();
}
private static List<Double> toCoordinates(Point point) {

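A small sketch of the kind of _source fragment the case-insensitive handling above now accepts; the coordinates are arbitrary example values.

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;

class GeoJsonTypeCaseSketch {

    // a GeoJSON value as Elasticsearch may return it: note the lower-case "point",
    // which is now accepted in addition to the spec-cased "Point"
    static Map<String, Object> pointDocument() {
        Map<String, Object> geo = new LinkedHashMap<>();
        geo.put("type", "point");
        geo.put("coordinates", Arrays.asList(8.77, 52.07));
        return geo;
    }
}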
View File

@ -216,6 +216,7 @@ public class MappingBuilder {
Field fieldAnnotation = property.findAnnotation(Field.class);
boolean isCompletionProperty = property.isCompletionProperty();
boolean isNestedOrObjectProperty = isNestedOrObjectProperty(property);
DynamicMapping dynamicMapping = property.findAnnotation(DynamicMapping.class);
if (!isCompletionProperty && property.isEntity() && hasRelevantAnnotation(property)) {
@ -230,7 +231,7 @@ public class MappingBuilder {
: null;
mapEntity(builder, persistentEntity, false, property.getFieldName(), true, fieldAnnotation.type(),
fieldAnnotation, property.findAnnotation(DynamicMapping.class));
fieldAnnotation, dynamicMapping);
return;
}
}
@ -245,9 +246,9 @@ public class MappingBuilder {
if (isRootObject && fieldAnnotation != null && property.isIdProperty()) {
applyDefaultIdFieldMapping(builder, property);
} else if (multiField != null) {
addMultiFieldMapping(builder, property, multiField, isNestedOrObjectProperty);
addMultiFieldMapping(builder, property, multiField, isNestedOrObjectProperty, dynamicMapping);
} else if (fieldAnnotation != null) {
addSingleFieldMapping(builder, property, fieldAnnotation, isNestedOrObjectProperty);
addSingleFieldMapping(builder, property, fieldAnnotation, isNestedOrObjectProperty, dynamicMapping);
}
}
@ -328,7 +329,7 @@ public class MappingBuilder {
* @throws IOException
*/
private void addSingleFieldMapping(XContentBuilder builder, ElasticsearchPersistentProperty property,
Field annotation, boolean nestedOrObjectField) throws IOException {
Field annotation, boolean nestedOrObjectField, @Nullable DynamicMapping dynamicMapping) throws IOException {
// build the property json, if empty skip it as this is no valid mapping
XContentBuilder propertyBuilder = jsonBuilder().startObject();
@ -340,6 +341,11 @@ public class MappingBuilder {
}
builder.startObject(property.getFieldName());
if (nestedOrObjectField && dynamicMapping != null) {
builder.field(TYPE_DYNAMIC, dynamicMapping.value().name().toLowerCase());
}
addFieldMappingParameters(builder, annotation, nestedOrObjectField);
builder.endObject();
}
@ -380,10 +386,15 @@ public class MappingBuilder {
* @throws IOException
*/
private void addMultiFieldMapping(XContentBuilder builder, ElasticsearchPersistentProperty property,
MultiField annotation, boolean nestedOrObjectField) throws IOException {
MultiField annotation, boolean nestedOrObjectField, @Nullable DynamicMapping dynamicMapping) throws IOException {
// main field
builder.startObject(property.getFieldName());
if (nestedOrObjectField && dynamicMapping != null) {
builder.field(TYPE_DYNAMIC, dynamicMapping.value().name().toLowerCase());
}
addFieldMappingParameters(builder, annotation.mainField(), nestedOrObjectField);
// inner fields

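An illustrative entity showing where the dynamic-mapping annotation handled above may sit; index, class and field names are assumptions.

import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.DynamicMapping;
import org.springframework.data.elasticsearch.annotations.DynamicMappingValue;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;

@Document(indexName = "articles") // placeholder index name
class Article {

    // the generated mapping for this object field now contains "dynamic": "false"
    @DynamicMapping(DynamicMappingValue.False)
    @Field(type = FieldType.Object)
    private Author author;
}

class Author {
    private String name;
}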
View File

@ -15,10 +15,14 @@
*/
package org.springframework.data.elasticsearch.core.join;
import java.util.Objects;
import org.springframework.data.annotation.PersistenceConstructor;
import org.springframework.lang.Nullable;
/**
* @author Subhobrata Dey
* @author Sascha Woo
* @since 4.1
*/
public class JoinField<ID> {
@ -35,6 +39,7 @@ public class JoinField<ID> {
this(name, null);
}
@PersistenceConstructor
public JoinField(String name, @Nullable ID parent) {
this.name = name;
this.parent = parent;
@ -52,4 +57,21 @@ public class JoinField<ID> {
public String getName() {
return name;
}
@Override
public int hashCode() {
return Objects.hash(name, parent);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof JoinField)) {
return false;
}
JoinField other = (JoinField) obj;
return Objects.equals(name, other.name) && Objects.equals(parent, other.parent);
}
}

View File

@ -31,6 +31,11 @@ import org.springframework.data.util.TypeInformation;
public class SimpleElasticsearchMappingContext
extends AbstractMappingContext<SimpleElasticsearchPersistentEntity<?>, ElasticsearchPersistentProperty> {
@Override
protected boolean shouldCreatePersistentEntityFor(TypeInformation<?> type) {
return !ElasticsearchSimpleTypes.HOLDER.isSimpleType(type.getType());
}
@Override
protected <T> SimpleElasticsearchPersistentEntity<?> createPersistentEntity(TypeInformation<T> typeInformation) {
return new SimpleElasticsearchPersistentEntity<>(typeInformation);

View File

@ -31,6 +31,9 @@ import org.springframework.data.mapping.MappingException;
import org.springframework.data.mapping.PropertyHandler;
import org.springframework.data.mapping.model.BasicPersistentEntity;
import org.springframework.data.mapping.model.PersistentPropertyAccessorFactory;
import org.springframework.data.spel.EvaluationContextProvider;
import org.springframework.data.spel.ExpressionDependencies;
import org.springframework.data.util.Lazy;
import org.springframework.data.util.TypeInformation;
import org.springframework.expression.EvaluationContext;
import org.springframework.expression.Expression;
@ -74,7 +77,10 @@ public class SimpleElasticsearchPersistentEntity<T> extends BasicPersistentEntit
private @Nullable VersionType versionType;
private boolean createIndexAndMapping;
private final Map<String, ElasticsearchPersistentProperty> fieldNamePropertyCache = new ConcurrentHashMap<>();
private EvaluationContextProvider evaluationContextProvider = EvaluationContextProvider.DEFAULT;
private final ConcurrentHashMap<String, Expression> indexNameExpressions = new ConcurrentHashMap<>();
private final Lazy<EvaluationContext> indexNameEvaluationContext = Lazy.of(this::getIndexNameEvaluationContext);
public SimpleElasticsearchPersistentEntity(TypeInformation<T> typeInformation) {
@ -302,12 +308,20 @@ public class SimpleElasticsearchPersistentEntity<T> extends BasicPersistentEntit
return seqNoPrimaryTermProperty;
}
@Nullable
@Override
public ElasticsearchPersistentProperty getJoinFieldProperty() {
return joinFieldProperty;
}
// region SpEL handling
@Override
public void setEvaluationContextProvider(EvaluationContextProvider provider) {
super.setEvaluationContextProvider(provider);
this.evaluationContextProvider = provider;
}
/**
* resolves all the names in the IndexCoordinates object. If a name cannot be resolved, the original name is returned.
*
@ -316,14 +330,12 @@ public class SimpleElasticsearchPersistentEntity<T> extends BasicPersistentEntit
*/
private IndexCoordinates resolve(IndexCoordinates indexCoordinates) {
EvaluationContext context = getEvaluationContext(null);
String[] indexNames = indexCoordinates.getIndexNames();
String[] resolvedNames = new String[indexNames.length];
for (int i = 0; i < indexNames.length; i++) {
String indexName = indexNames[i];
resolvedNames[i] = resolve(context, indexName);
resolvedNames[i] = resolve(indexName);
}
return IndexCoordinates.of(resolvedNames);
@ -332,22 +344,49 @@ public class SimpleElasticsearchPersistentEntity<T> extends BasicPersistentEntit
/**
* tries to resolve the given name. If this is not successful, the original value is returned
*
* @param context SpEL evaluation context
* @param name name to resolve
* @return the resolved name or the input name if it cannot be resolved
*/
private String resolve(EvaluationContext context, String name) {
private String resolve(String name) {
Assert.notNull(name, "name must not be null");
Expression expression = indexNameExpressions.computeIfAbsent(name, s -> {
Expression expr = PARSER.parseExpression(name, ParserContext.TEMPLATE_EXPRESSION);
return expr instanceof LiteralExpression ? null : expr;
});
Expression expression = getExpressionForIndexName(name);
String resolvedName = expression != null ? expression.getValue(context, String.class) : null;
String resolvedName = expression != null ? expression.getValue(indexNameEvaluationContext.get(), String.class) : null;
return resolvedName != null ? resolvedName : name;
}
/**
* returns an {@link Expression} for #name if name contains a {@link ParserContext#TEMPLATE_EXPRESSION}, otherwise
* returns {@literal null}.
*
* @param name the name to get the expression for
* @return Expression may be null
*/
@Nullable
private Expression getExpressionForIndexName(String name) {
return indexNameExpressions.computeIfAbsent(name, s -> {
Expression expr = PARSER.parseExpression(s, ParserContext.TEMPLATE_EXPRESSION);
return expr instanceof LiteralExpression ? null : expr;
});
}
/**
* build the {@link EvaluationContext} considering {@link ExpressionDependencies} from the name returned by
* {@link #getIndexName()}.
*
* @return EvaluationContext
*/
private EvaluationContext getIndexNameEvaluationContext() {
Expression expression = getExpressionForIndexName(getIndexName());
ExpressionDependencies expressionDependencies = expression != null ? ExpressionDependencies.discover(expression)
: ExpressionDependencies.none();
return evaluationContextProvider.getEvaluationContext(null, expressionDependencies);
}
// endregion
@Override
@ -363,5 +402,4 @@ public class SimpleElasticsearchPersistentEntity<T> extends BasicPersistentEntit
.put("index.refresh_interval", getRefreshInterval()).put("index.store.type", getIndexStoreType()).map();
return Document.from(map);
}
}
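A self-contained sketch of the template parsing that getExpressionForIndexName relies on: a plain index name parses to a LiteralExpression and is skipped, a name containing #{...} is kept and evaluated.

import org.springframework.expression.Expression;
import org.springframework.expression.common.LiteralExpression;
import org.springframework.expression.common.TemplateParserContext;
import org.springframework.expression.spel.standard.SpelExpressionParser;

class IndexNameTemplateSketch {
    public static void main(String[] args) {
        SpelExpressionParser parser = new SpelExpressionParser();

        // no #{...} in the name: a LiteralExpression, so the original name is used as-is
        Expression plain = parser.parseExpression("log-entries", new TemplateParserContext());
        System.out.println(plain instanceof LiteralExpression); // true

        // a template expression is evaluated; here against a default context for brevity,
        // the entity class above evaluates against the context built from the EvaluationContextProvider
        Expression templated = parser.parseExpression("log-#{1 + 1}", new TemplateParserContext());
        System.out.println(templated.getValue(String.class)); // log-2
    }
}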

View File

@ -163,10 +163,13 @@ public class SimpleElasticsearchPersistentProperty extends
&& (isTemporalAccessor || isDate)) {
DateFormat dateFormat = field.format();
String property = getOwner().getType().getSimpleName() + "." + getName();
if (dateFormat == DateFormat.none) {
throw new MappingException(
String.format("Property %s is annotated with FieldType.%s but has no DateFormat defined",
getOwner().getType().getSimpleName() + "." + getName(), field.type().name()));
LOGGER.warn(
String.format("No DateFormat defined for property %s. Make sure you have a Converter registered for %s",
property, actualType.getSimpleName()));
return;
}
ElasticsearchDateConverter converter;
@ -177,7 +180,7 @@ public class SimpleElasticsearchPersistentProperty extends
if (!StringUtils.hasLength(pattern)) {
throw new MappingException(
String.format("Property %s is annotated with FieldType.%s and a custom format but has no pattern defined",
getOwner().getType().getSimpleName() + "." + getName(), field.type().name()));
property, field.type().name()));
}
converter = ElasticsearchDateConverter.of(pattern);

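The warning above points to registering a converter when no DateFormat is given; a minimal sketch of such a registration, using LocalDate only as an example type. In a configuration class this would typically be exposed by overriding elasticsearchCustomConversions().

import java.time.LocalDate;
import java.util.Arrays;

import org.springframework.core.convert.converter.Converter;
import org.springframework.data.elasticsearch.core.convert.ElasticsearchCustomConversions;

class CustomDateConversions {

    // would typically be returned from an overridden elasticsearchCustomConversions() bean method
    ElasticsearchCustomConversions customConversions() {
        return new ElasticsearchCustomConversions(
                Arrays.asList(LocalDateToString.INSTANCE, StringToLocalDate.INSTANCE));
    }

    enum LocalDateToString implements Converter<LocalDate, String> {
        INSTANCE;

        @Override
        public String convert(LocalDate source) {
            return source.toString(); // ISO-8601, e.g. 2021-11-12
        }
    }

    enum StringToLocalDate implements Converter<String, LocalDate> {
        INSTANCE;

        @Override
        public LocalDate convert(String source) {
            return LocalDate.parse(source);
        }
    }
}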
View File

@ -23,6 +23,7 @@ import java.util.List;
import org.springframework.data.domain.Pageable;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
/**
* MoreLikeThisQuery
@ -176,6 +177,9 @@ public class MoreLikeThisQuery {
}
public void setPageable(Pageable pageable) {
Assert.notNull(pageable, "pageable must not be null");
this.pageable = pageable;
}
}

View File

@ -54,14 +54,14 @@ public class ElasticsearchPartQuery extends AbstractElasticsearchRepositoryQuery
public ElasticsearchPartQuery(ElasticsearchQueryMethod method, ElasticsearchOperations elasticsearchOperations) {
super(method, elasticsearchOperations);
this.tree = new PartTree(method.getName(), method.getEntityInformation().getJavaType());
this.tree = new PartTree(queryMethod.getName(), queryMethod.getResultProcessor().getReturnedType().getDomainType());
this.elasticsearchConverter = elasticsearchOperations.getElasticsearchConverter();
this.mappingContext = elasticsearchConverter.getMappingContext();
}
@Override
public Object execute(Object[] parameters) {
Class<?> clazz = queryMethod.getEntityInformation().getJavaType();
Class<?> clazz = queryMethod.getResultProcessor().getReturnedType().getDomainType();
ParametersParameterAccessor accessor = new ParametersParameterAccessor(queryMethod.getParameters(), parameters);
CriteriaQuery query = createQuery(accessor);
@ -126,7 +126,9 @@ public class ElasticsearchPartQuery extends AbstractElasticsearchRepositoryQuery
result = elasticsearchOperations.searchOne(query, clazz, index);
}
return queryMethod.isNotSearchHitMethod() ? SearchHitSupport.unwrapSearchHits(result) : result;
return (queryMethod.isNotSearchHitMethod() && !queryMethod.isSearchPageMethod())
? SearchHitSupport.unwrapSearchHits(result)
: result;
}
@Nullable

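For illustration, a derived query whose return type exercises the new isSearchPageMethod() branch; the entity, index name and method are assumptions.

import org.springframework.data.annotation.Id;
import org.springframework.data.domain.Pageable;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.core.SearchPage;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;

@Document(indexName = "books") // placeholder index name
class Book {
    @Id private String id;
    private String title;
}

interface BookRepository extends ElasticsearchRepository<Book, String> {

    // a SearchPage return type is now detected and no longer unwrapped into the raw hits
    SearchPage<Book> findByTitle(String title, Pageable pageable);
}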
View File

@ -1,5 +1,5 @@
/*
* Copyright 2013-2020 the original author or authors.
* Copyright 2013-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -15,22 +15,17 @@
*/
package org.springframework.data.elasticsearch.repository.query;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.core.SearchHitSupport;
import org.springframework.data.elasticsearch.core.SearchHits;
import org.springframework.data.elasticsearch.core.convert.DateTimeConverters;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.StringQuery;
import org.springframework.data.elasticsearch.repository.support.StringQueryUtil;
import org.springframework.data.repository.query.ParametersParameterAccessor;
import org.springframework.data.util.StreamUtils;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.NumberUtils;
/**
* ElasticsearchStringQuery
@ -43,25 +38,8 @@ import org.springframework.util.NumberUtils;
*/
public class ElasticsearchStringQuery extends AbstractElasticsearchRepositoryQuery {
private static final Pattern PARAMETER_PLACEHOLDER = Pattern.compile("\\?(\\d+)");
private String query;
private final GenericConversionService conversionService = new GenericConversionService();
{
if (!conversionService.canConvert(java.util.Date.class, String.class)) {
conversionService.addConverter(DateTimeConverters.JavaDateConverter.INSTANCE);
}
if (ClassUtils.isPresent("org.joda.time.DateTimeZone", ElasticsearchStringQuery.class.getClassLoader())) {
if (!conversionService.canConvert(org.joda.time.ReadableInstant.class, String.class)) {
conversionService.addConverter(DateTimeConverters.JodaDateTimeConverter.INSTANCE);
}
if (!conversionService.canConvert(org.joda.time.LocalDateTime.class, String.class)) {
conversionService.addConverter(DateTimeConverters.JodaLocalDateTimeConverter.INSTANCE);
}
}
}
public ElasticsearchStringQuery(ElasticsearchQueryMethod queryMethod, ElasticsearchOperations elasticsearchOperations,
String query) {
super(queryMethod, elasticsearchOperations);
@ -71,7 +49,7 @@ public class ElasticsearchStringQuery extends AbstractElasticsearchRepositoryQuery
@Override
public Object execute(Object[] parameters) {
Class<?> clazz = queryMethod.getEntityInformation().getJavaType();
Class<?> clazz = queryMethod.getResultProcessor().getReturnedType().getDomainType();
ParametersParameterAccessor accessor = new ParametersParameterAccessor(queryMethod.getParameters(), parameters);
StringQuery stringQuery = createQuery(accessor);
@ -89,52 +67,32 @@ public class ElasticsearchStringQuery extends AbstractElasticsearchRepositoryQuery
if (queryMethod.isPageQuery()) {
stringQuery.setPageable(accessor.getPageable());
SearchHits<?> searchHits = elasticsearchOperations.search(stringQuery, clazz, index);
result = SearchHitSupport.page(searchHits, stringQuery.getPageable());
} else if (queryMethod.isStreamQuery()) {
if (accessor.getPageable().isUnpaged()) {
stringQuery.setPageable(PageRequest.of(0, DEFAULT_STREAM_BATCH_SIZE));
if (queryMethod.isSearchPageMethod()) {
result = SearchHitSupport.searchPageFor(searchHits, stringQuery.getPageable());
} else {
stringQuery.setPageable(accessor.getPageable());
result = SearchHitSupport
.unwrapSearchHits(SearchHitSupport.searchPageFor(searchHits, stringQuery.getPageable()));
}
} else if (queryMethod.isStreamQuery()) {
stringQuery.setPageable(
accessor.getPageable().isPaged() ? accessor.getPageable() : PageRequest.of(0, DEFAULT_STREAM_BATCH_SIZE));
result = StreamUtils.createStreamFromIterator(elasticsearchOperations.searchForStream(stringQuery, clazz, index));
} else if (queryMethod.isCollectionQuery()) {
if (accessor.getPageable().isPaged()) {
stringQuery.setPageable(accessor.getPageable());
}
stringQuery.setPageable(accessor.getPageable().isPaged() ? accessor.getPageable() : Pageable.unpaged());
result = elasticsearchOperations.search(stringQuery, clazz, index);
} else {
result = elasticsearchOperations.searchOne(stringQuery, clazz, index);
}
return queryMethod.isNotSearchHitMethod() ? SearchHitSupport.unwrapSearchHits(result) : result;
return (queryMethod.isNotSearchHitMethod() && !queryMethod.isSearchPageMethod())
? SearchHitSupport.unwrapSearchHits(result)
: result;
}
protected StringQuery createQuery(ParametersParameterAccessor parameterAccessor) {
String queryString = replacePlaceholders(this.query, parameterAccessor);
String queryString = new StringQueryUtil(elasticsearchOperations.getElasticsearchConverter().getConversionService())
.replacePlaceholders(this.query, parameterAccessor);
return new StringQuery(queryString);
}
private String replacePlaceholders(String input, ParametersParameterAccessor accessor) {
Matcher matcher = PARAMETER_PLACEHOLDER.matcher(input);
String result = input;
while (matcher.find()) {
String placeholder = Pattern.quote(matcher.group()) + "(?!\\d+)";
int index = NumberUtils.parseNumber(matcher.group(1), Integer.class);
result = result.replaceAll(placeholder, getParameterWithIndex(accessor, index));
}
return result;
}
private String getParameterWithIndex(ParametersParameterAccessor accessor, int index) {
Object parameter = accessor.getBindableValue(index);
if (parameter == null) {
return "null";
}
if (conversionService.canConvert(parameter.getClass(), String.class)) {
return conversionService.convert(parameter, String.class);
}
return parameter.toString();
}
}
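Taken together, the reworked execute() (SearchPage results are no longer unwrapped) and the placeholder handling now delegated to StringQueryUtil make repository declarations like the following work. This is a sketch only; the Book entity, its fields and the index name are assumptions (see #1811 for the SearchPage case and #1858 for collection parameters):

import java.util.Collection;
import java.util.List;

import org.springframework.data.annotation.Id;
import org.springframework.data.domain.Pageable;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Query;
import org.springframework.data.elasticsearch.core.SearchPage;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;

// assumed mapped entity with 'name' and 'author' fields
@Document(indexName = "books")
class Book {
    @Id String id;
    String name;
    String author;
}

interface BookRepository extends ElasticsearchRepository<Book, String> {

    // returned as a SearchPage instead of being unwrapped (#1811)
    @Query("{\"match\": {\"name\": {\"query\": \"?0\"}}}")
    SearchPage<Book> findByName(String name, Pageable pageable);

    // the collection parameter is rendered as a JSON array, with string values quoted (#1858)
    @Query("{\"terms\": {\"author\": ?0}}")
    List<Book> findByAuthorIn(Collection<String> authors);
}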

View File

@ -15,15 +15,11 @@
*/
package org.springframework.data.elasticsearch.repository.query;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.springframework.data.elasticsearch.core.ReactiveElasticsearchOperations;
import org.springframework.data.elasticsearch.core.query.StringQuery;
import org.springframework.data.elasticsearch.repository.support.StringQueryUtil;
import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.util.NumberUtils;
import org.springframework.util.ObjectUtils;
/**
* @author Christoph Strobl
@ -32,7 +28,6 @@ import org.springframework.util.ObjectUtils;
*/
public class ReactiveElasticsearchStringQuery extends AbstractReactiveElasticsearchRepositoryQuery {
private static final Pattern PARAMETER_PLACEHOLDER = Pattern.compile("\\?(\\d+)");
private final String query;
public ReactiveElasticsearchStringQuery(ReactiveElasticsearchQueryMethod queryMethod,
@ -52,27 +47,12 @@ public class ReactiveElasticsearchStringQuery extends AbstractReactiveElasticsearchRepositoryQuery
@Override
protected StringQuery createQuery(ElasticsearchParameterAccessor parameterAccessor) {
String queryString = replacePlaceholders(this.query, parameterAccessor);
String queryString = new StringQueryUtil(
getElasticsearchOperations().getElasticsearchConverter().getConversionService()).replacePlaceholders(this.query,
parameterAccessor);
return new StringQuery(queryString);
}
private String replacePlaceholders(String input, ElasticsearchParameterAccessor accessor) {
Matcher matcher = PARAMETER_PLACEHOLDER.matcher(input);
String result = input;
while (matcher.find()) {
String placeholder = Pattern.quote(matcher.group()) + "(?!\\d+)";
int index = NumberUtils.parseNumber(matcher.group(1), Integer.class);
result = result.replaceAll(placeholder, getParameterWithIndex(accessor, index));
}
return result;
}
private String getParameterWithIndex(ElasticsearchParameterAccessor accessor, int index) {
return ObjectUtils.nullSafeToString(accessor.getBindableValue(index));
}
@Override
boolean isCountQuery() {
return false;

View File

@ -0,0 +1,135 @@
/*
* Copyright 2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.elasticsearch.repository.support;
import java.util.Collection;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.springframework.core.convert.ConversionService;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.data.elasticsearch.core.convert.DateTimeConverters;
import org.springframework.data.elasticsearch.repository.query.ElasticsearchStringQuery;
import org.springframework.data.repository.query.ParameterAccessor;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.NumberUtils;
/**
* @author Peter-Josef Meisch
* @author Niklas Herder
*/
final public class StringQueryUtil {
private static final Pattern PARAMETER_PLACEHOLDER = Pattern.compile("\\?(\\d+)");
private final ConversionService conversionService;
private final GenericConversionService genericConversionService;
public StringQueryUtil(ConversionService conversionService) {
Assert.notNull(conversionService, "conversionService must not be null");
this.conversionService = conversionService;
genericConversionService = setupGenericConversionService();
}
private GenericConversionService setupGenericConversionService() {
GenericConversionService genericConversionService = new GenericConversionService();
if (!genericConversionService.canConvert(java.util.Date.class, String.class)) {
genericConversionService.addConverter(DateTimeConverters.JavaDateConverter.INSTANCE);
}
if (ClassUtils.isPresent("org.joda.time.DateTimeZone", ElasticsearchStringQuery.class.getClassLoader())) {
if (!genericConversionService.canConvert(org.joda.time.ReadableInstant.class, String.class)) {
genericConversionService.addConverter(DateTimeConverters.JodaDateTimeConverter.INSTANCE);
}
if (!genericConversionService.canConvert(org.joda.time.LocalDateTime.class, String.class)) {
genericConversionService.addConverter(DateTimeConverters.JodaLocalDateTimeConverter.INSTANCE);
}
}
return genericConversionService;
}
public String replacePlaceholders(String input, ParameterAccessor accessor) {
Matcher matcher = PARAMETER_PLACEHOLDER.matcher(input);
String result = input;
while (matcher.find()) {
String placeholder = Pattern.quote(matcher.group()) + "(?!\\d+)";
int index = NumberUtils.parseNumber(matcher.group(1), Integer.class);
result = result.replaceAll(placeholder, Matcher.quoteReplacement(getParameterWithIndex(accessor, index)));
}
return result;
}
private String getParameterWithIndex(ParameterAccessor accessor, int index) {
Object parameter = accessor.getBindableValue(index);
String parameterValue = "null";
// noinspection ConstantConditions
if (parameter != null) {
parameterValue = convert(parameter);
}
return parameterValue;
}
private String convert(Object parameter) {
if (Collection.class.isAssignableFrom(parameter.getClass())) {
Collection<?> collectionParam = (Collection<?>) parameter;
StringBuilder sb = new StringBuilder("[");
sb.append(collectionParam.stream().map(o -> {
if (o instanceof String) {
return "\"" + convert(o) + "\"";
} else {
return convert(o);
}
}).collect(Collectors.joining(",")));
sb.append("]");
return sb.toString();
} else {
String parameterValue = "null";
if (conversionService.canConvert(parameter.getClass(), String.class)) {
String converted = conversionService.convert(parameter, String.class);
if (converted != null) {
parameterValue = converted;
}
} else if (genericConversionService.canConvert(parameter.getClass(), String.class)) {
String converted = genericConversionService.convert(parameter, String.class);
if (converted != null) {
parameterValue = converted;
}
} else {
parameterValue = parameter.toString();
}
parameterValue = parameterValue.replaceAll("\"", Matcher.quoteReplacement("\\\""));
return parameterValue;
}
}
}

View File

@ -1,6 +1,251 @@
Spring Data Elasticsearch Changelog
===================================
Changes in version 4.1.11 (2021-07-16)
--------------------------------------
* #1866 - Queries defined with `@Query` are not using registered converters for parameter conversion.
* #1858 - Collection parameters for @Query-annotated methods get escaped wrongly.
* #1846 - Missing hashCode and equals methods in JoinField.
Changes in version 4.2.2 (2021-06-22)
-------------------------------------
* #1834 - TopMetricsAggregation NamedObjectNotFoundException: unknown field [top_metrics].
Changes in version 4.1.10 (2021-06-22)
--------------------------------------
* #1843 - Pageable results and @Query annotation.
* #1834 - TopMetricsAggregation NamedObjectNotFoundException: unknown field [top_metrics].
Changes in version 4.2.1 (2021-05-14)
-------------------------------------
* #1811 - StringQuery execution crashes on return type `SearchPage<T>`.
* #1805 - Upgrade to Elasticsearch 7.12.1.
* #1794 - Refactor `DefaultReactiveElasticsearchClient` to do request customization with the `WebClient`.
* #1790 - Custom Query with string parameter which contains double quotes.
* #1787 - Search with MoreLikeThisQuery should use Pageable.
* #1785 - Fix documentation about auditing.
* #1778 - Custom property names must be used in SourceFilter and source fields.
* #1767 - DynamicMapping annotation should be applicable to any object field.
Changes in version 4.1.9 (2021-05-14)
-------------------------------------
* #1811 - StringQuery execution crashes on return type `SearchPage<T>`.
* #1790 - Custom Query with string parameter which contains double quotes.
* #1787 - Search with MoreLikeThisQuery should use Pageable.
* #1785 - Fix documentation about auditing.
* #1767 - DynamicMapping annotation should be applicable to any object field.
Changes in version 4.2.0 (2021-04-14)
-------------------------------------
* #1771 - Remove `@Persistent` from entity-scan include filters.
* #1761 - CriteriaQuery must use nested query only with properties of type nested.
* #1759 - health check with DefaultReactiveElasticsearchClient.
* #1758 - Nested Criteria queries must consider sub-fields.
* #1755 - Documentation fix to not show deprecated calls.
* #1754 - Types are in the process of being removed.
* #1753 - CriteriaQuery must support nested queries.
* #1390 - Introduce ClusterOperations [DATAES-818].
Changes in version 4.1.8 (2021-04-14)
-------------------------------------
* #1759 - health check with DefaultReactiveElasticsearchClient.
Changes in version 4.0.9.RELEASE (2021-04-14)
---------------------------------------------
* #1759 - health check with DefaultReactiveElasticsearchClient.
Changes in version 4.1.7 (2021-03-31)
-------------------------------------
Changes in version 4.2.0-RC1 (2021-03-31)
-----------------------------------------
* #1745 - Automatically close scroll context when returning streamed results.
* #1741 - Upgrade to Elasticsearch 7.12.
* #1738 - Readme lists artifacts with .RELEASE and .BUILD-SNAPSHOT suffixes.
* #1736 - Upgrade to OpenWebBeans 2.0.
* #1734 - Remove lombok.
* #1733 - Update CI to Java 16.
* #1727 - Allow multiple date formats for date fields.
* #1719 - Configure index settings with @Setting annotation.
Changes in version 4.2.0-M5 (2021-03-17)
----------------------------------------
* #1725 - Add support for SearchTemplate for reactive client.
* #1721 - IndexOps.getMapping raises exception if mapping contains "dynamic_templates".
* #1718 - Create index with mapping in one step.
* #1712 - Requests with ReactiveElasticsearchRepository methods doesn't fail if it can't connect with Elasticsearch.
* #1711 - Add the type hint _class attribute to the index mapping.
* #1704 - Add SearchFailure field in ByQueryResponse.
* #1700 - Add missing "Document ranking types".
* #1687 - Upgrade to Elasticsearch 7.11.
* #1686 - Add rescore functionality.
* #1678 - Errors are silent in multiGet.
* #1658 - ReactiveElasticsearchClient should use the same request parameters as non-reactive code.
* #1646 - Add function to list all indexes.
* #1514 - Add `matched_queries` field in SearchHit [DATAES-979].
Changes in version 4.1.6 (2021-03-17)
-------------------------------------
* #1712 - Requests with ReactiveElasticsearchRepository methods doesn't fail if it can't connect with Elasticsearch.
Changes in version 4.0.8.RELEASE (2021-03-17)
---------------------------------------------
* #1712 - Requests with ReactiveElasticsearchRepository methods doesn't fail if it can't connect with Elasticsearch.
Changes in version 4.2.0-M4 (2021-02-18)
----------------------------------------
Changes in version 4.1.5 (2021-02-18)
-------------------------------------
Changes in version 4.2.0-M3 (2021-02-17)
----------------------------------------
* #1689 - Missing anchor links in documentation.
* #1680 - After upgrade to 4.x can't read property id from _source named (different value from _id).
* #1679 - Errors are silent in delete by query in ReactiveElasticsearchTemplate.
* #1676 - Align MappingElasticsearchConverter with other Spring Data converters.
* #1675 - Consider Document as simple type.
* #1669 - Cleanup Deprecations from 4.0.
* #1668 - Writing a more complex CriteriaQuery.
* #1667 - Couldn't find PersistentEntity for type class com.example.demo.dto.Address.
* #1665 - ReactiveElasticsearchOperations indexName twice encoding.
* #1662 - Documentation fix.
* #1659 - Fix source filter setup in multiget requests.
* #1655 - GeoJson types can be lowercase in Elasticsearch.
* #1649 - Upgrade to Elasticsearch 7.10.2.
* #1647 - Use own implementation of date formatters.
* #1644 - Implement update by query.
* #1565 - Allow using FieldNamingStrategy for property to fieldname matching [DATAES-993].
* #1370 - Add enabled mapping parameter to FieldType configuration [DATAES-798].
* #1218 - Add routing parameter to ElasticsearchOperations [DATAES-644].
* #1156 - Add @CountQuery annotation [DATAES-584].
* #1143 - Support for search_after [DATAES-571].
* #803 - Don't update indexed object if it is no persistent entity [DATAES-229].
* #725 - Add query Explain Support [DATAES-149].
Changes in version 4.1.4 (2021-02-17)
-------------------------------------
* #1667 - Couldn't find PersistentEntity for type class com.example.demo.dto.Address.
* #1665 - ReactiveElasticsearchOperations indexName twice encoding.
* #1662 - Documentation fix.
* #1659 - Fix source filter setup in multiget requests.
* #1655 - GeoJson types can be lowercase in Elasticsearch.
Changes in version 4.0.7.RELEASE (2021-02-17)
---------------------------------------------
* DATAES-996 - Update CI jobs with Docker Login.
* #1667 - Couldn't find PersistentEntity for type class com.example.demo.dto.Address.
* #1665 - ReactiveElasticsearchOperations indexName twice encoding.
* #1662 - Documentation fix.
* #1659 - Fix source filter setup in multiget requests.
Changes in version 3.2.13.RELEASE (2021-02-17)
----------------------------------------------
* #1694 - Upgrade to Elasticsearch 6.8.14.
* #1662 - Documentation fix.
Changes in version 4.2.0-M2 (2021-01-13)
----------------------------------------
* DATAES-1003 - add timeout to search query.
* DATAES-996 - Update CI jobs with Docker Login.
* DATAES-982 - Improve refresh handling.
* DATAES-946 - Support 'wildcard' field type.
* #1640 - Add support for GetFieldMapping request in ReactiveElasticsearchClient.
* #1638 - Upgrade to Elasticsearch 7.10.1.
* #1634 - Update Testcontainers dependency.
* #1632 - Update copyright notice to 2021.
* #1629 - Update repository after GitHub issues migration.
* #1576 - Add version of Spring dependency to docs [DATAES-1004].
* #1056 - Repository initialization should throw an Exception when index cannot be created [DATAES-481].
Changes in version 4.1.3 (2021-01-13)
-------------------------------------
* DATAES-996 - Update CI jobs with Docker Login.
* #1634 - Update Testcontainers dependency.
Changes in version 4.1.2 (2020-12-09)
-------------------------------------
* DATAES-991 - Wrong value for TermVector(with_positions_offets_payloads).
* DATAES-990 - Index creation fails with Authentication object cannot be null on startup.
* DATAES-987 - IndexOperations getMapping fail when using index alias.
* DATAES-978 - Accept DateFormat.none for a date property to enable custom Converters.
* DATAES-977 - Fix versions in reference documentation for 4.1.
* DATAES-973 - Release 4.1.2 (2020.0.2).
* DATAES-972 - BeforeConvertCallback should be called before index query is built.
* DATAES-543 - Adjust configuration support classes so they do not require proxying.
Changes in version 4.2.0-M1 (2020-12-09)
----------------------------------------
* DATAES-995 - Code Cleanup after DATACMNS-1838.
* DATAES-994 - Add setup for mutation testing.
* DATAES-991 - Wrong value for TermVector(with_positions_offets_payloads).
* DATAES-990 - Index creation fails with Authentication object cannot be null on startup.
* DATAES-989 - Improve deprecation warning for id properties without annotation.
* DATAES-988 - Allow specifying max results in NativeSearchQueryBuilder.
* DATAES-987 - IndexOperations getMapping fail when using index alias.
* DATAES-986 - Fix Javadoc.
* DATAES-985 - Add builder method for track_total_hits to NativeSearchQueryBuilder.
* DATAES-983 - Test dependency hoverfly-java-junit5 leaks into compile scope.
* DATAES-978 - Accept DateFormat.none for a date property to enable custom Converters.
* DATAES-976 - Implement CrudRepository.delete(Iterable<ID> ids).
* DATAES-975 - Upgrade to Elasticsearch 7.10.
* DATAES-974 - remove usage of deprecated WebClient exchange() method.
* DATAES-972 - BeforeConvertCallback should be called before index query is built.
* DATAES-971 - Fix tests for using a proxy with reactive client.
* DATAES-970 - Take Testcontainers version from the Spring Data Build pom.
* DATAES-969 - Use ResultProcessor in ElasticsearchPartQuery to build PartTree.
* DATAES-968 - Enable Maven caching for Jenkins jobs.
* DATAES-966 - Release 4.2 M1 (2021.0.0).
* DATAES-882 - HLRC Configuration - add ability to set max connections for the underlying HttpClient.
* DATAES-588 - Add support for custom callbacks in High Level/Low Level REST Client builder.
* DATAES-543 - Adjust configuration support classes so they do not require proxying.
* DATAES-362 - Add support for composable meta annotations.
* DATAES-247 - Support OpType in IndexQuery.
Changes in version 4.0.6.RELEASE (2020-12-09)
---------------------------------------------
* DATAES-991 - Wrong value for TermVector(with_positions_offets_payloads).
* DATAES-969 - Use ResultProcessor in ElasticsearchPartQuery to build PartTree.
* DATAES-968 - Enable Maven caching for Jenkins jobs.
* DATAES-964 - Release 4.0.6 (Neumann SR6).
Changes in version 3.2.12.RELEASE (2020-12-09)
----------------------------------------------
* DATAES-969 - Use ResultProcessor in ElasticsearchPartQuery to build PartTree.
* DATAES-963 - Release 3.2.12 (Moore SR12).
Changes in version 4.1.1 (2020-11-11)
-------------------------------------
* DATAES-969 - Use ResultProcessor in ElasticsearchPartQuery to build PartTree.
* DATAES-968 - Enable Maven caching for Jenkins jobs.
* DATAES-965 - Release 4.1.1 (2020.0.1).
Changes in version 4.1.0 (2020-10-28)
-------------------------------------
* DATAES-962 - Deprecate Joda support.
@ -1363,6 +1608,32 @@ Release Notes - Spring Data Elasticsearch - Version 1.0 M1 (2014-02-07)

View File

@ -1,4 +1,4 @@
Spring Data Elasticsearch 4.1 GA (2020.0.0)
Spring Data Elasticsearch 4.1.15 (2020.0.15)
Copyright (c) [2013-2019] Pivotal Software, Inc.
This product is licensed to you under the Apache License, Version 2.0 (the "License").
@ -15,6 +15,21 @@ conditions of the subcomponent's license, as noted in the LICENSE file.

View File

@ -22,20 +22,29 @@ import static org.mockito.Mockito.*;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
import java.net.URI;
import java.util.Optional;
import java.util.function.Function;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.Request;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.Spy;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.http.HttpStatus;
import org.springframework.web.reactive.function.client.ClientResponse;
import org.springframework.web.reactive.function.client.WebClient;
import org.springframework.web.reactive.function.client.WebClient.ResponseSpec;
import org.springframework.web.util.UriBuilder;
/**
* @author Peter-Josef Meisch
@ -46,29 +55,23 @@ class DefaultReactiveElasticsearchClientTest {
@Mock private HostProvider hostProvider;
@Mock private Function<SearchRequest, Request> searchRequestConverter;
@Spy private RequestCreator requestCreator;
private DefaultReactiveElasticsearchClient client;
@Mock private WebClient webClient;
@BeforeEach
void setUp() {
client = new DefaultReactiveElasticsearchClient(hostProvider, new RequestCreator() {
@Override
public Function<SearchRequest, Request> search() {
return searchRequestConverter;
}
}) {
@Test
void shouldSetAppropriateRequestParametersOnCount() {
when(requestCreator.search()).thenReturn(searchRequestConverter);
SearchRequest searchRequest = new SearchRequest("someindex") //
.source(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()));
ReactiveElasticsearchClient client = new DefaultReactiveElasticsearchClient(hostProvider, requestCreator) {
@Override
public Mono<ResponseSpec> execute(ReactiveElasticsearchClientCallback callback) {
return Mono.empty();
}
};
}
@Test
void shouldSetAppropriateRequestParametersOnCount() {
SearchRequest searchRequest = new SearchRequest("someindex") //
.source(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()));
client.count(searchRequest).as(StepVerifier::create).verifyComplete();
@ -79,4 +82,33 @@ class DefaultReactiveElasticsearchClientTest {
assertThat(source.trackTotalHitsUpTo()).isEqualTo(TRACK_TOTAL_HITS_ACCURATE);
assertThat(source.fetchSource()).isEqualTo(FetchSourceContext.DO_NOT_FETCH_SOURCE);
}
@Test // #1712
@DisplayName("should throw ElasticsearchStatusException on server 5xx with empty body")
void shouldThrowElasticsearchStatusExceptionOnServer5xxWithEmptyBody() {
when(hostProvider.getActive(any())).thenReturn(Mono.just(webClient));
WebClient.RequestBodyUriSpec requestBodyUriSpec = mock(WebClient.RequestBodyUriSpec.class);
when(requestBodyUriSpec.uri((Function<UriBuilder, URI>) any())).thenReturn(requestBodyUriSpec);
when(requestBodyUriSpec.attribute(any(), any())).thenReturn(requestBodyUriSpec);
when(requestBodyUriSpec.headers(any())).thenReturn(requestBodyUriSpec);
when(webClient.method(any())).thenReturn(requestBodyUriSpec);
when(requestBodyUriSpec.exchangeToMono(any())).thenAnswer(invocationOnMock -> {
Function<ClientResponse, ? extends Mono<?>> responseHandler = invocationOnMock.getArgument(0);
ClientResponse clientResponse = mock(ClientResponse.class);
when(clientResponse.statusCode()).thenReturn(HttpStatus.SERVICE_UNAVAILABLE);
ClientResponse.Headers headers = mock(ClientResponse.Headers.class);
when(headers.contentType()).thenReturn(Optional.empty());
when(clientResponse.headers()).thenReturn(headers);
when(clientResponse.body(any())).thenReturn(Mono.empty());
return responseHandler.apply(clientResponse);
});
ReactiveElasticsearchClient client = new DefaultReactiveElasticsearchClient(hostProvider, requestCreator);
client.get(new GetRequest("42")) //
.as(StepVerifier::create) //
.expectError(ElasticsearchStatusException.class) //
.verify(); //
}
}

View File

@ -1,5 +1,5 @@
/*
* Copyright 2018-2020 the original author or authors.
* Copyright 2018-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -62,7 +62,7 @@ public class ReactiveElasticsearchClientUnitTests {
static final String HOST = ":9200";
MockDelegatingElasticsearchHostProvider<HostProvider> hostProvider;
MockDelegatingElasticsearchHostProvider<? extends HostProvider<?>> hostProvider;
ReactiveElasticsearchClient client;
@BeforeEach

View File

@ -186,7 +186,7 @@ public class ReactiveMockClientTestsUtils {
return delegate;
}
public MockDelegatingElasticsearchHostProvider<T> withActiveDefaultHost(String host) {
public MockDelegatingElasticsearchHostProvider<? extends HostProvider<?>> withActiveDefaultHost(String host) {
return new MockDelegatingElasticsearchHostProvider(HttpHeaders.EMPTY, clientProvider, errorCollector, delegate,
host);
}

View File

@ -1,5 +1,5 @@
/*
* Copyright 2018-2020 the original author or authors.
* Copyright 2018-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -30,6 +30,7 @@ import org.springframework.data.elasticsearch.client.reactive.ReactiveMockClient
/**
* @author Christoph Strobl
* @author Peter-Josef Meisch
*/
public class SingleNodeHostProviderUnitTests {

View File

@ -18,6 +18,7 @@ package org.springframework.data.elasticsearch.config;
import static org.assertj.core.api.Assertions.*;
import static org.mockito.Mockito.*;
import org.springframework.context.annotation.Bean;
import reactor.core.publisher.Mono;
import java.util.Collection;
@ -114,6 +115,7 @@ public class ElasticsearchConfigurationSupportUnitTests {
static class ReactiveRestConfig extends AbstractReactiveElasticsearchConfiguration {
@Override
@Bean
public ReactiveElasticsearchClient reactiveElasticsearchClient() {
ReactiveElasticsearchClient client = mock(ReactiveElasticsearchClient.class);
when(client.info()).thenReturn(Mono

View File

@ -1,5 +1,5 @@
/*
* Copyright 2014-2020 the original author or authors.
* Copyright 2014-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -121,14 +121,12 @@ import org.springframework.lang.Nullable;
*/
public abstract class ElasticsearchTemplateTests {
protected static final String INDEX_NAME_JOIN_SAMPLE_ENTITY = "test-index-sample-join-template";
private static final String INDEX_NAME_SAMPLE_ENTITY = "test-index-sample-core-template";
private static final String INDEX_1_NAME = "test-index-1";
private static final String INDEX_2_NAME = "test-index-2";
private static final String INDEX_3_NAME = "test-index-3";
protected final IndexCoordinates index = IndexCoordinates.of(INDEX_NAME_SAMPLE_ENTITY);
protected static final String INDEX_NAME_JOIN_SAMPLE_ENTITY = "test-index-sample-join-template";
@Autowired protected ElasticsearchOperations operations;
protected IndexOperations indexOperations;
@ -1097,6 +1095,51 @@ public abstract class ElasticsearchTemplateTests {
assertThat(content).contains(sampleEntity);
}
@Test // #1787
@DisplayName("should use Pageable on MoreLikeThis queries")
void shouldUsePageableOnMoreLikeThisQueries() {
String sampleMessage = "So we build a web site or an application and want to add search to it, "
+ "and then it hits us: getting search working is hard. We want our search solution to be fast,"
+ " we want a painless setup and a completely free search schema, we want to be able to index data simply using JSON over HTTP, "
+ "we want our search server to be always available, we want to be able to start with one machine and scale to hundreds, "
+ "we want real-time search, we want simple multi-tenancy, and we want a solution that is built for the cloud.";
String referenceId = nextIdAsString();
Collection<String> ids = IntStream.rangeClosed(1, 10).mapToObj(i -> nextIdAsString()).collect(Collectors.toList());
ids.add(referenceId);
ids.stream()
.map(id -> getIndexQuery(
SampleEntity.builder().id(id).message(sampleMessage).version(System.currentTimeMillis()).build()))
.forEach(indexQuery -> operations.index(indexQuery, index));
indexOperations.refresh();
MoreLikeThisQuery moreLikeThisQuery = new MoreLikeThisQuery();
moreLikeThisQuery.setId(referenceId);
moreLikeThisQuery.addFields("message");
moreLikeThisQuery.setMinDocFreq(1);
moreLikeThisQuery.setPageable(PageRequest.of(0, 5));
SearchHits<SampleEntity> searchHits = operations.search(moreLikeThisQuery, SampleEntity.class, index);
assertThat(searchHits.getTotalHits()).isEqualTo(10);
assertThat(searchHits.getSearchHits()).hasSize(5);
Collection<String> returnedIds = searchHits.getSearchHits().stream().map(SearchHit::getId)
.collect(Collectors.toList());
moreLikeThisQuery.setPageable(PageRequest.of(1, 5));
searchHits = operations.search(moreLikeThisQuery, SampleEntity.class, index);
assertThat(searchHits.getTotalHits()).isEqualTo(10);
assertThat(searchHits.getSearchHits()).hasSize(5);
searchHits.getSearchHits().stream().map(SearchHit::getId).forEach(returnedIds::add);
assertThat(returnedIds).hasSize(10);
assertThat(ids).containsAll(returnedIds);
}
@Test // DATAES-167
public void shouldReturnResultsWithScanAndScrollForGivenCriteriaQuery() {
@ -1466,6 +1509,30 @@ public abstract class ElasticsearchTemplateTests {
assertThat(mapping.get("properties")).isNotNull();
}
@Test // DATAES-987
@DisplayName("should read mappings from alias")
void shouldReadMappingsFromAlias() {
String aliasName = INDEX_NAME_SAMPLE_ENTITY + "alias";
indexOperations.alias( //
new AliasActions( //
new AliasAction.Add( //
AliasActionParameters.builder() //
.withIndices(INDEX_NAME_SAMPLE_ENTITY) //
.withAliases(aliasName) //
.build()) //
) //
);
IndexOperations aliasIndexOps = operations.indexOps(IndexCoordinates.of(aliasName));
Map<String, Object> mappingFromAlias = aliasIndexOps.getMapping();
assertThat(mappingFromAlias).isNotNull();
assertThat(
((Map<String, Object>) ((Map<String, Object>) mappingFromAlias.get("properties")).get("message")).get("type"))
.isEqualTo("text");
}
@Test
public void shouldDeleteIndexForGivenEntity() {
@ -3604,6 +3671,21 @@ public abstract class ElasticsearchTemplateTests {
softly.assertThat(searchHits.getTotalHitsRelation()).isEqualTo(TotalHitsRelation.OFF);
softly.assertAll();
}
@Test // #1893
@DisplayName("should index document from source with version")
void shouldIndexDocumentFromSourceWithVersion() {
String source = "{\n" + //
" \"answer\": 42\n" + //
"}";
IndexQuery query = new IndexQueryBuilder() //
.withId("42") //
.withSource(source) //
.withVersion(42L) //
.build();
operations.index(query, IndexCoordinates.of(INDEX_NAME_SAMPLE_ENTITY));
}
@Data
@NoArgsConstructor
@ -3800,4 +3882,5 @@ public abstract class ElasticsearchTemplateTests {
@JoinTypeRelation(parent = "question", children = { "answer" }) }) private JoinField<String> myJoinField;
@Field(type = Text) private String text;
}
// endregion
}

View File

@ -32,6 +32,8 @@ import java.lang.Boolean;
import java.lang.Long;
import java.lang.Object;
import java.net.ConnectException;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@ -1037,6 +1039,32 @@ public class ReactiveElasticsearchTemplateIntegrationTests {
assertThat(searchHits.getSearchHits().size()).isEqualTo(5);
}).verifyComplete();
}
@Test // #1665
@DisplayName("should be able to process date-math-index names")
void shouldBeAbleToProcessDateMathIndexNames() {
String indexName = "foo-" + LocalDate.now().format(DateTimeFormatter.ofPattern("yyyy.MM"));
String dateMathIndexName = "<foo-{now/M{yyyy.MM}}>";
template.indexOps(IndexCoordinates.of(dateMathIndexName)) //
.create() //
.as(StepVerifier::create) //
.expectNext(true) //
.verifyComplete(); //
template.indexOps(IndexCoordinates.of(indexName)) //
.exists() //
.as(StepVerifier::create) //
.expectNext(true) //
.verifyComplete(); //
template.indexOps(IndexCoordinates.of(dateMathIndexName)) //
.delete() //
.as(StepVerifier::create) //
.expectNext(true) //
.verifyComplete(); //
}
// endregion
// region Helper functions
@ -1134,5 +1162,6 @@ public class ReactiveElasticsearchTemplateIntegrationTests {
@Id private String id;
@Version private Long version;
}
// endregion
}

View File

@ -0,0 +1,216 @@
/*
* Copyright 2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.elasticsearch.core;
import static org.assertj.core.api.Assertions.*;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Collections;
import java.util.List;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.data.elasticsearch.core.query.SourceFilter;
import org.springframework.data.elasticsearch.junit.jupiter.ElasticsearchRestTemplateConfiguration;
import org.springframework.data.elasticsearch.junit.jupiter.SpringIntegrationTest;
import org.springframework.test.context.ContextConfiguration;
/**
* @author Peter-Josef Meisch
*/
@SpringIntegrationTest
@ContextConfiguration(classes = { ElasticsearchRestTemplateConfiguration.class })
public class SourceFilterIntegrationTests {
@Autowired private ElasticsearchOperations operations;
private IndexOperations indexOps;
@BeforeEach
void setUp() {
indexOps = operations.indexOps(Entity.class);
indexOps.create();
indexOps.putMapping();
operations.save(Entity.builder().id("42").field1("one").field2("two").field3("three").build());
indexOps.refresh();
}
@AfterEach
void tearDown() {
indexOps.delete();
}
@Test // #1659
@DisplayName("should only return requested fields on search")
void shouldOnlyReturnRequestedFieldsOnSearch() {
Query query = Query.findAll();
query.addFields("field2");
SearchHits<Entity> searchHits = operations.search(query, Entity.class);
assertThat(searchHits).hasSize(1);
Entity entity = searchHits.getSearchHit(0).getContent();
assertThat(entity.getField1()).isNull();
assertThat(entity.getField2()).isEqualTo("two");
assertThat(entity.getField3()).isNull();
}
@Test // #1659
@DisplayName("should only return requested fields on multiget")
void shouldOnlyReturnRequestedFieldsOnGMultiGet() {
Query query = new NativeSearchQueryBuilder().withIds(Collections.singleton("42")).build();
query.addFields("field2");
List<Entity> entities = operations.multiGet(query, Entity.class);
assertThat(entities).hasSize(1);
Entity entity = entities.get(0);
assertThat(entity.getField1()).isNull();
assertThat(entity.getField2()).isEqualTo("two");
assertThat(entity.getField3()).isNull();
}
@Test // #1659
@DisplayName("should not return excluded fields from SourceFilter on search")
void shouldNotReturnExcludedFieldsFromSourceFilterOnSearch() {
Query query = Query.findAll();
query.addSourceFilter(new SourceFilter() {
@Override
public String[] getIncludes() {
return new String[] {};
}
@Override
public String[] getExcludes() {
return new String[] { "field2" };
}
});
SearchHits<Entity> entities = operations.search(query, Entity.class);
assertThat(entities).hasSize(1);
Entity entity = entities.getSearchHit(0).getContent();
assertThat(entity.getField1()).isNotNull();
assertThat(entity.getField2()).isNull();
assertThat(entity.getField3()).isNotNull();
}
@Test // #1659
@DisplayName("should not return excluded fields from SourceFilter on multiget")
void shouldNotReturnExcludedFieldsFromSourceFilterOnMultiGet() {
Query query = new NativeSearchQueryBuilder().withIds(Collections.singleton("42")).build();
query.addSourceFilter(new SourceFilter() {
@Override
public String[] getIncludes() {
return new String[] {};
}
@Override
public String[] getExcludes() {
return new String[] { "field2" };
}
});
List<Entity> entities = operations.multiGet(query, Entity.class);
assertThat(entities).hasSize(1);
Entity entity = entities.get(0);
assertThat(entity.getField1()).isNotNull();
assertThat(entity.getField2()).isNull();
assertThat(entity.getField3()).isNotNull();
}
@Test // #1659
@DisplayName("should only return included fields from SourceFilter on search")
void shouldOnlyReturnIncludedFieldsFromSourceFilterOnSearch() {
Query query = Query.findAll();
query.addSourceFilter(new SourceFilter() {
@Override
public String[] getIncludes() {
return new String[] { "field2" };
}
@Override
public String[] getExcludes() {
return new String[] {};
}
});
SearchHits<Entity> entities = operations.search(query, Entity.class);
assertThat(entities).hasSize(1);
Entity entity = entities.getSearchHit(0).getContent();
assertThat(entity.getField1()).isNull();
assertThat(entity.getField2()).isNotNull();
assertThat(entity.getField3()).isNull();
}
@Test // #1659
@DisplayName("should only return included fields from SourceFilter on multiget")
void shouldOnlyReturnIncludedFieldsFromSourceFilterOnMultiGet() {
Query query = new NativeSearchQueryBuilder().withIds(Collections.singleton("42")).build();
query.addSourceFilter(new SourceFilter() {
@Override
public String[] getIncludes() {
return new String[] { "field2" };
}
@Override
public String[] getExcludes() {
return new String[] {};
}
});
List<Entity> entities = operations.multiGet(query, Entity.class);
assertThat(entities).hasSize(1);
Entity entity = entities.get(0);
assertThat(entity.getField1()).isNull();
assertThat(entity.getField2()).isNotNull();
assertThat(entity.getField3()).isNull();
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Document(indexName = "sourcefilter-tests")
public static class Entity {
@Id private String id;
@Field(type = FieldType.Text) private String field1;
@Field(type = FieldType.Text) private String field2;
@Field(type = FieldType.Text) private String field3;
}
}
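The anonymous SourceFilter implementations in the tests above can also be expressed with the FetchSourceFilterBuilder shipped by the library. A minimal sketch equivalent to the include case, assuming the same package as the test above so that the Entity class and the id indexed in setUp() are available:

import java.util.Collections;
import java.util.List;

import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.core.query.FetchSourceFilterBuilder;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.elasticsearch.core.query.Query;

class SourceFilterBuilderExample {

    // only field2 is fetched from _source; field1 and field3 stay null on the returned entity
    // Entity refers to the test entity defined in SourceFilterIntegrationTests above
    List<SourceFilterIntegrationTests.Entity> loadOnlyField2(ElasticsearchOperations operations) {
        Query query = new NativeSearchQueryBuilder()
                .withIds(Collections.singleton("42"))
                .withSourceFilter(new FetchSourceFilterBuilder().withIncludes("field2").build())
                .build();
        return operations.multiGet(query, SourceFilterIntegrationTests.Entity.class);
    }
}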

View File

@ -0,0 +1,25 @@
/*
* Copyright 2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.elasticsearch.core;
import org.springframework.data.elasticsearch.junit.jupiter.ElasticsearchTemplateConfiguration;
import org.springframework.test.context.ContextConfiguration;
/**
* @author Peter-Josef Meisch
*/
@ContextConfiguration(classes = { ElasticsearchTemplateConfiguration.class })
public class SourceFilterIntegrationTransportTests extends SourceFilterIntegrationTests {}

View File

@ -53,6 +53,9 @@ class GeoConvertersUnitTests {
@DisplayName("GeoJsonPoint")
class GeoJsonPointUnitTests {
// NOTE: the test converting from a map contains the type names in lowercase, that might be returned from
// Elasticsearch
@Test // DATAES-930
@DisplayName("should be converted to a Map")
void shouldBeConvertedToAMap() throws JSONException {
@ -75,7 +78,7 @@ class GeoConvertersUnitTests {
// make sure we can read int values as well
String json = "{\n" + //
" \"type\": \"Point\",\n" + //
" \"type\": \"point\",\n" + //
" \"coordinates\": [12, 34.0]\n" + //
"}"; //
@ -117,8 +120,14 @@ class GeoConvertersUnitTests {
void shouldBeConvertedFromAMap() {
// make sure we can read int values as well
String json = "{\n" + " \"type\": \"MultiPoint\",\n" + " \"coordinates\": [\n" + " [12.0, 34],\n"
+ " [56, 78.0]\n" + " ]\n" + "}\n";
String json = "{\n" + //
" \"type\": \"multipoint\",\n" //
+ " \"coordinates\": [\n" + //
" [12.0, 34],\n" + //
" [56, 78.0]\n" + //
" ]\n" + //
"}\n"; //
Document document = Document.parse(json);
GeoJsonMultiPoint expected = GeoJsonMultiPoint.of(new Point(12, 34), new Point(56, 78));
@ -158,7 +167,7 @@ class GeoConvertersUnitTests {
// make sure we can read int values as well
String json = "{\n" + //
" \"type\": \"LineString\",\n" + //
" \"type\": \"linestring\",\n" + //
" \"coordinates\": [\n" + //
" [12.0, 34],\n" + //
" [56, 78.0]\n" //
@ -205,7 +214,7 @@ class GeoConvertersUnitTests {
void shouldBeConvertedFromAMap() {
// make sure we can read int values as well
String json = "{\n" + //
" \"type\": \"MultiLineString\",\n" + //
" \"type\": \"multilinestring\",\n" + //
" \"coordinates\": [\n" + //
" [[12, 34.0], [56.0, 78]],\n" + //
" [[90.0, 12], [34, 56.0]]\n" + //
@ -256,7 +265,7 @@ class GeoConvertersUnitTests {
void shouldBeConvertedFromAMap() {
String json = "{\n" + //
" \"type\": \"Polygon\",\n" + //
" \"type\": \"polygon\",\n" + //
" \"coordinates\": [\n" + //
" [[12, 34.0], [56.0, 78], [90, 12.0], [12, 34.0]],\n" + //
" [[56.0, 78], [90, 12.0], [34.0, 56], [56.0, 78]]\n" + //
@ -308,7 +317,7 @@ class GeoConvertersUnitTests {
void shouldBeConvertedFromAMap() {
String json = "{\n" + //
" \"type\": \"MultiPolygon\",\n" + //
" \"type\": \"multipolygon\",\n" + //
" \"coordinates\": [\n" + //
" [[[12, 34.0], [56.0, 78], [90, 12.0], [12, 34.0]]],\n" + //
" [[[56, 78.0], [90, 12.0], [34.0, 56], [56, 78.0]]]\n" + //
@ -369,14 +378,14 @@ class GeoConvertersUnitTests {
void shouldBeConvertedFromAMap() {
String json = "{\n" + //
" \"type\": \"GeometryCollection\",\n" + //
" \"type\": \"geometrycollection\",\n" + //
" \"geometries\": [\n" + //
" {\n" + //
" \"type\": \"Point\",\n" + //
" \"type\": \"point\",\n" + //
" \"coordinates\": [12.0, 34.0]\n" + //
" },\n" + //
" {\n" + //
" \"type\": \"Polygon\",\n" + //
" \"type\": \"polygon\",\n" + //
" \"coordinates\": [\n" + //
" [[12.0, 34.0], [56.0, 78.0], [90.0, 12.0], [12.0, 34.0]]\n" + //
" ]\n" + //

View File

@ -0,0 +1,235 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.elasticsearch.core.event;
import static org.assertj.core.api.Assertions.*;
import static org.mockito.Mockito.*;
import lombok.Data;
import java.util.Collections;
import java.util.List;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.JoinTypeRelation;
import org.springframework.data.elasticsearch.annotations.JoinTypeRelations;
import org.springframework.data.elasticsearch.core.AbstractElasticsearchTemplate;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.core.IndexOperations;
import org.springframework.data.elasticsearch.core.join.JoinField;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.IndexQuery;
import org.springframework.data.elasticsearch.core.query.SeqNoPrimaryTerm;
import org.springframework.data.elasticsearch.junit.jupiter.SpringIntegrationTest;
import org.springframework.lang.Nullable;
import org.springframework.stereotype.Component;
/**
* @author Peter-Josef Meisch
* @author Roman Puchkovskiy
*/
@SpringIntegrationTest
abstract class ElasticsearchOperationsCallbackIntegrationTest {
private static final String INDEX = "test-operations-callback";
@Autowired private ElasticsearchOperations originalOperations;
// need a spy here on the abstract implementation class
private AbstractElasticsearchTemplate operations;
@Nullable private static SeqNoPrimaryTerm seqNoPrimaryTerm = null;
@Configuration
static class Config {
@Component
static class SampleEntityBeforeConvertCallback implements BeforeConvertCallback<SampleEntity> {
@Override
public SampleEntity onBeforeConvert(SampleEntity entity, IndexCoordinates index) {
entity.setText("converted");
JoinField<String> joinField = new JoinField<>("answer", "42");
entity.setJoinField(joinField);
if (seqNoPrimaryTerm != null) {
entity.setSeqNoPrimaryTerm(seqNoPrimaryTerm);
}
return entity;
}
}
}
@BeforeEach
void setUp() {
seqNoPrimaryTerm = null;
operations = (AbstractElasticsearchTemplate) spy(originalOperations);
IndexOperations indexOps = operations.indexOps(SampleEntity.class);
indexOps.delete();
indexOps.create();
indexOps.putMapping(SampleEntity.class);
// store one entity to have a seq_no and primary_term
final SampleEntity initial = new SampleEntity("1", "initial");
final SampleEntity saved = operations.save(initial);
seqNoPrimaryTerm = saved.getSeqNoPrimaryTerm();
}
@AfterEach
void tearDown() {
IndexOperations indexOps = operations.indexOps(SampleEntity.class);
indexOps.delete();
}
@Test // DATAES-68
void shouldCallBeforeConvertCallback() {
SampleEntity entity = new SampleEntity("1", "test");
SampleEntity saved = operations.save(entity);
assertThat(saved.getText()).isEqualTo("converted");
}
@Test // DATAES-972
@DisplayName("should apply conversion result to IndexQuery on save")
void shouldApplyConversionResultToIndexQueryOnSave() {
SampleEntity entity = new SampleEntity("1", "test");
operations.save(entity);
ArgumentCaptor<IndexQuery> indexQueryCaptor = ArgumentCaptor.forClass(IndexQuery.class);
verify(operations, times(2)).doIndex(indexQueryCaptor.capture(), any());
final IndexQuery capturedIndexQuery = indexQueryCaptor.getValue();
SampleEntity convertedEntity = (SampleEntity) capturedIndexQuery.getObject();
final JoinField<String> joinField = convertedEntity.getJoinField();
assertThat(joinField.getName()).isEqualTo("answer");
assertThat(joinField.getParent()).isEqualTo("42");
assertThat(capturedIndexQuery.getRouting()).isEqualTo("42");
assertThat(capturedIndexQuery.getSeqNo()).isEqualTo(seqNoPrimaryTerm.getSequenceNumber());
assertThat(capturedIndexQuery.getPrimaryTerm()).isEqualTo(seqNoPrimaryTerm.getPrimaryTerm());
}
@Test // DATAES-972
@DisplayName("should apply conversion result to IndexQuery when not set ")
void shouldApplyConversionResultToIndexQueryWhenNotSet() {
SampleEntity entity = new SampleEntity("1", "test");
final IndexQuery indexQuery = new IndexQuery();
indexQuery.setId(entity.getId());
indexQuery.setObject(entity);
operations.index(indexQuery, IndexCoordinates.of(INDEX));
ArgumentCaptor<IndexQuery> indexQueryCaptor = ArgumentCaptor.forClass(IndexQuery.class);
verify(operations, times(2)).doIndex(indexQueryCaptor.capture(), any());
final IndexQuery capturedIndexQuery = indexQueryCaptor.getValue();
SampleEntity convertedEntity = (SampleEntity) capturedIndexQuery.getObject();
final JoinField<String> joinField = convertedEntity.getJoinField();
assertThat(joinField.getName()).isEqualTo("answer");
assertThat(joinField.getParent()).isEqualTo("42");
assertThat(capturedIndexQuery.getRouting()).isEqualTo("42");
assertThat(capturedIndexQuery.getSeqNo()).isEqualTo(seqNoPrimaryTerm.getSequenceNumber());
assertThat(capturedIndexQuery.getPrimaryTerm()).isEqualTo(seqNoPrimaryTerm.getPrimaryTerm());
}
@Test // DATAES-972
@DisplayName("should not apply conversion result to IndexQuery when already set ")
void shouldNotApplyConversionResultToIndexQueryWhenAlreadySet() {
SeqNoPrimaryTerm seqNoPrimaryTermOriginal = seqNoPrimaryTerm;
seqNoPrimaryTerm = new SeqNoPrimaryTerm(7, 8);
SampleEntity entity = new SampleEntity("1", "test");
final IndexQuery indexQuery = new IndexQuery();
indexQuery.setId(entity.getId());
indexQuery.setObject(entity);
indexQuery.setRouting("12");
indexQuery.setSeqNo(seqNoPrimaryTermOriginal.getSequenceNumber());
indexQuery.setPrimaryTerm(seqNoPrimaryTermOriginal.getPrimaryTerm());
operations.index(indexQuery, IndexCoordinates.of(INDEX));
ArgumentCaptor<IndexQuery> indexQueryCaptor = ArgumentCaptor.forClass(IndexQuery.class);
verify(operations, times(2)).doIndex(indexQueryCaptor.capture(), any());
final IndexQuery capturedIndexQuery = indexQueryCaptor.getValue();
SampleEntity convertedEntity = (SampleEntity) capturedIndexQuery.getObject();
final JoinField<String> joinField = convertedEntity.getJoinField();
assertThat(joinField.getName()).isEqualTo("answer");
assertThat(joinField.getParent()).isEqualTo("42");
assertThat(capturedIndexQuery.getRouting()).isEqualTo("12");
assertThat(capturedIndexQuery.getSeqNo()).isEqualTo(seqNoPrimaryTermOriginal.getSequenceNumber());
assertThat(capturedIndexQuery.getPrimaryTerm()).isEqualTo(seqNoPrimaryTermOriginal.getPrimaryTerm());
}
@Test // DATAES-972
@DisplayName("should apply conversion result to IndexQuery in bulkIndex")
void shouldApplyConversionResultToIndexQueryInBulkIndex() {
SampleEntity entity = new SampleEntity("1", "test");
final IndexQuery indexQuery = new IndexQuery();
indexQuery.setId(entity.getId());
indexQuery.setObject(entity);
operations.bulkIndex(Collections.singletonList(indexQuery), SampleEntity.class);
ArgumentCaptor<List<IndexQuery>> indexQueryListCaptor = ArgumentCaptor.forClass(List.class);
verify(operations, times(1)).bulkOperation(indexQueryListCaptor.capture(), any(), any());
final List<IndexQuery> capturedIndexQueries = indexQueryListCaptor.getValue();
assertThat(capturedIndexQueries).hasSize(1);
final IndexQuery capturedIndexQuery = capturedIndexQueries.get(0);
SampleEntity convertedEntity = (SampleEntity) capturedIndexQuery.getObject();
final JoinField<String> joinField = convertedEntity.getJoinField();
assertThat(joinField.getName()).isEqualTo("answer");
assertThat(joinField.getParent()).isEqualTo("42");
assertThat(capturedIndexQuery.getRouting()).isEqualTo("42");
assertThat(capturedIndexQuery.getSeqNo()).isEqualTo(seqNoPrimaryTerm.getSequenceNumber());
assertThat(capturedIndexQuery.getPrimaryTerm()).isEqualTo(seqNoPrimaryTerm.getPrimaryTerm());
}
@Data
@Document(indexName = INDEX)
static class SampleEntity {
@Id private String id;
private String text;
@JoinTypeRelations(relations = { @JoinTypeRelation(parent = "question",
children = { "answer" }) }) @Nullable private JoinField<String> joinField;
private SeqNoPrimaryTerm seqNoPrimaryTerm;
public SampleEntity(String id, String text) {
this.id = id;
this.text = text;
}
}
}

View File

@ -1,102 +0,0 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.elasticsearch.core.event;
import static org.assertj.core.api.Assertions.*;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.core.IndexOperations;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.stereotype.Component;
/**
* @author Peter-Josef Meisch
* @author Roman Puchkovskiy
*/
abstract class ElasticsearchOperationsCallbackTest {
@Autowired private ElasticsearchOperations operations;
@Configuration
static class Config {
@Component
static class SampleEntityBeforeConvertCallback implements BeforeConvertCallback<SampleEntity> {
@Override
public SampleEntity onBeforeConvert(SampleEntity entity, IndexCoordinates index) {
entity.setText("converted");
return entity;
}
}
}
@BeforeEach
void setUp() {
IndexOperations indexOps = operations.indexOps(SampleEntity.class);
indexOps.delete();
indexOps.create();
indexOps.putMapping(SampleEntity.class);
}
@AfterEach
void tearDown() {
IndexOperations indexOps = operations.indexOps(SampleEntity.class);
indexOps.delete();
}
@Test
void shouldCallBeforeConvertCallback() {
SampleEntity entity = new SampleEntity("1", "test");
SampleEntity saved = operations.save(entity);
assertThat(saved.getText()).isEqualTo("converted");
}
@Document(indexName = "test-operations-callback")
static class SampleEntity {
@Id private String id;
private String text;
public SampleEntity(String id, String text) {
this.id = id;
this.text = text;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getText() {
return text;
}
public void setText(String text) {
this.text = text;
}
}
}
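The deleted test above registered its BeforeConvertCallback as a plain Spring bean. The same pattern, reduced to a minimal sketch for an arbitrary entity type (MyEntity is a placeholder name):

@Component
class MyEntityBeforeConvertCallback implements BeforeConvertCallback<MyEntity> {

    @Override
    public MyEntity onBeforeConvert(MyEntity entity, IndexCoordinates index) {
        // adjust or replace the entity before it is converted and written to the index
        entity.setText("converted");
        return entity;
    }
}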


@ -16,12 +16,11 @@
package org.springframework.data.elasticsearch.core.event;
import org.springframework.data.elasticsearch.junit.jupiter.ElasticsearchRestTemplateConfiguration;
import org.springframework.data.elasticsearch.junit.jupiter.SpringIntegrationTest;
import org.springframework.test.context.ContextConfiguration;
/**
* @author Peter-Josef Meisch
*/
@SpringIntegrationTest
@ContextConfiguration(classes = { ElasticsearchRestTemplateConfiguration.class, ElasticsearchOperationsCallbackTest.Config.class })
class ElasticsearchRestOperationsCallbackTest extends ElasticsearchOperationsCallbackTest {}
@ContextConfiguration(classes = { ElasticsearchRestTemplateConfiguration.class,
ElasticsearchOperationsCallbackIntegrationTest.Config.class })
class ElasticsearchRestOperationsCallbackIntegrationTest extends ElasticsearchOperationsCallbackIntegrationTest {}


@ -16,12 +16,10 @@
package org.springframework.data.elasticsearch.core.event;
import org.springframework.data.elasticsearch.junit.jupiter.ElasticsearchTemplateConfiguration;
import org.springframework.data.elasticsearch.junit.jupiter.SpringIntegrationTest;
import org.springframework.test.context.ContextConfiguration;
/**
* @author Peter-Josef Meisch
*/
@SpringIntegrationTest
@ContextConfiguration(classes = { ElasticsearchTemplateConfiguration.class, ElasticsearchOperationsCallbackTest.Config.class })
class ElasticsearchTransportOperationsCallbackTest extends ElasticsearchOperationsCallbackTest {}
@ContextConfiguration(classes = { ElasticsearchTemplateConfiguration.class, ElasticsearchOperationsCallbackIntegrationTest.Config.class })
class ElasticsearchTransportOperationsCallbackIntegrationTest extends ElasticsearchOperationsCallbackIntegrationTest {}


@ -1,5 +1,5 @@
/*
* Copyright 2013-2020 the original author or authors.
* Copyright 2013-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -30,6 +30,7 @@ import lombok.NoArgsConstructor;
import lombok.Setter;
import java.lang.Integer;
import java.lang.Object;
import java.math.BigDecimal;
import java.util.Collection;
import java.util.Collections;
@ -45,6 +46,7 @@ import org.assertj.core.data.Percentage;
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.annotation.Id;
@ -249,9 +251,29 @@ public class MappingBuilderIntegrationTests extends MappingContextBaseTests {
assertThat(fieldLastName.get("copy_to")).isEqualTo(copyToValue);
}
@Test // DATAES-991
@DisplayName("should write correct TermVector values")
void shouldWriteCorrectTermVectorValues() {
IndexOperations indexOps = operations.indexOps(TermVectorFieldEntity.class);
indexOps.create();
indexOps.putMapping();
}
/**
* @author Xiao Yu
*/
@Test // #1767
@DisplayName("should write dynamic mapping entries")
void shouldWriteDynamicMappingEntries() {
IndexOperations indexOps = operations.indexOps(DynamicMappingEntity.class);
indexOps.create();
indexOps.putMapping();
indexOps.delete();
}
@Setter
@Getter
@NoArgsConstructor
@ -621,4 +643,40 @@ public class MappingBuilderIntegrationTests extends MappingContextBaseTests {
@Field(type = FieldType.Rank_Features) private Map<String, Integer> topics;
}
@Data
@Document(indexName = "termvectors-test")
static class TermVectorFieldEntity {
@Id private String id;
@Field(type = FieldType.Text, termVector = TermVector.no) private String no;
@Field(type = FieldType.Text, termVector = TermVector.yes) private String yes;
@Field(type = FieldType.Text, termVector = TermVector.with_positions) private String with_positions;
@Field(type = FieldType.Text, termVector = TermVector.with_offsets) private String with_offsets;
@Field(type = FieldType.Text, termVector = TermVector.with_positions_offsets) private String with_positions_offsets;
@Field(type = FieldType.Text,
termVector = TermVector.with_positions_payloads) private String with_positions_payloads;
@Field(type = FieldType.Text,
termVector = TermVector.with_positions_offsets_payloads) private String with_positions_offsets_payloads;
}
@Document(indexName = "dynamic-mapping")
@DynamicMapping(DynamicMappingValue.False)
static class DynamicMappingEntity {
@Nullable @DynamicMapping(DynamicMappingValue.Strict) @Field(type = FieldType.Object) private Author author;
@Nullable @DynamicMapping(DynamicMappingValue.False) @Field(
type = FieldType.Object) private Map<String, Object> objectMap;
@Nullable @DynamicMapping(DynamicMappingValue.False) @Field(
type = FieldType.Nested) private List<Map<String, Object>> nestedObjectMap;
@Nullable
public Author getAuthor() {
return author;
}
public void setAuthor(Author author) {
this.author = author;
}
}
}


@ -37,6 +37,7 @@ import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@ -410,19 +411,28 @@ public class MappingBuilderUnitTests extends MappingContextBaseTests {
assertEquals(expected, mapping, true);
}
@Test
@Test // DATAES-148, #1767
void shouldWriteDynamicMappingSettings() throws JSONException {
String expected = "{\n" + //
" \"dynamic\": \"false\",\n" + //
" \"properties\": {\n" + //
" \"author\": {\n" + //
" \"dynamic\": \"strict\",\n" + //
" \"type\": \"object\",\n" + //
" \"properties\": {}\n" + //
" \"dynamic\": \"strict\",\n" + //
" \"properties\": {\n" + //
" }\n" + //
" },\n" + //
" \"objectMap\": {\n" + //
" \"type\": \"object\",\n" + //
" \"dynamic\": \"false\"\n" + //
" },\n" + //
" \"nestedObjectMap\": {\n" + //
" \"type\": \"nested\",\n" + //
" \"dynamic\": \"false\"\n" + //
" }\n" + //
" }\n" + //
"}\n";
"}"; //
String mapping = getMappingBuilder().buildPropertyMapping(ConfigureDynamicMappingEntity.class);
@ -898,6 +908,10 @@ public class MappingBuilderUnitTests extends MappingContextBaseTests {
static class ConfigureDynamicMappingEntity {
@Nullable @DynamicMapping(DynamicMappingValue.Strict) @Field(type = FieldType.Object) private Author author;
@Nullable @DynamicMapping(DynamicMappingValue.False) @Field(
type = FieldType.Object) private Map<String, Object> objectMap;
@Nullable @DynamicMapping(DynamicMappingValue.False) @Field(
type = FieldType.Nested) private List<Map<String, Object>> nestedObjectMap;
@Nullable
public Author getAuthor() {


@ -34,8 +34,9 @@ abstract class MappingContextBaseTests {
private SimpleElasticsearchMappingContext setupMappingContext() {
SimpleElasticsearchMappingContext mappingContext = new ElasticsearchConfigurationSupport()
.elasticsearchMappingContext();
ElasticsearchConfigurationSupport configurationSupport = new ElasticsearchConfigurationSupport();
SimpleElasticsearchMappingContext mappingContext = configurationSupport
.elasticsearchMappingContext(configurationSupport.elasticsearchCustomConversions());
mappingContext.initialize();
return mappingContext;
}


@ -0,0 +1,155 @@
/*
* Copyright 2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.elasticsearch.core.mapping;
import static org.assertj.core.api.Assertions.*;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.convert.ReadingConverter;
import org.springframework.data.convert.WritingConverter;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.core.IndexOperations;
import org.springframework.data.elasticsearch.core.SearchHits;
import org.springframework.data.elasticsearch.core.convert.ElasticsearchCustomConversions;
import org.springframework.data.elasticsearch.core.geo.GeoJsonPoint;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.data.elasticsearch.junit.jupiter.ElasticsearchRestTemplateConfiguration;
import org.springframework.data.elasticsearch.junit.jupiter.SpringIntegrationTest;
import org.springframework.data.elasticsearch.repository.config.EnableElasticsearchRepositories;
import org.springframework.test.context.ContextConfiguration;
/**
* Tests that a whole entity can be converted using custom conversions.
*
* @author Peter-Josef Meisch
*/
@SpringIntegrationTest
@ContextConfiguration(classes = { EntityCustomConversionIntegrationTests.Config.class })
public class EntityCustomConversionIntegrationTests {
@Configuration
@EnableElasticsearchRepositories(basePackages = { "org.springframework.data.elasticsearch.core.mapping" },
considerNestedRepositories = true)
static class Config extends ElasticsearchRestTemplateConfiguration {
@Override
public ElasticsearchCustomConversions elasticsearchCustomConversions() {
return new ElasticsearchCustomConversions(Arrays.asList(new EntityToMapConverter(), new MapToEntityConverter()));
}
}
@Autowired private ElasticsearchOperations operations;
@BeforeEach
void setUp() {
IndexOperations indexOps = operations.indexOps(Entity.class);
indexOps.create();
indexOps.putMapping();
}
@AfterEach
void tearDown() {
operations.indexOps(Entity.class).delete();
}
@Test // #1667
@DisplayName("should use CustomConversions on entity")
void shouldUseCustomConversionsOnEntity() {
Entity entity = Entity.builder() //
.value("hello") //
.location(GeoJsonPoint.of(8.0, 42.7)) //
.build();
org.springframework.data.elasticsearch.core.document.Document document = org.springframework.data.elasticsearch.core.document.Document
.create();
operations.getElasticsearchConverter().write(entity, document);
assertThat(document.getString("the_value")).isEqualTo("hello");
assertThat(document.getString("the_lon")).isEqualTo("8.0");
assertThat(document.getString("the_lat")).isEqualTo("42.7");
}
@Test // #1667
@DisplayName("should store and load entity from Elasticsearch")
void shouldStoreAndLoadEntityFromElasticsearch() {
Entity entity = Entity.builder() //
.value("hello") //
.location(GeoJsonPoint.of(8.0, 42.7)) //
.build();
Entity savedEntity = operations.save(entity);
operations.indexOps(Entity.class).refresh();
SearchHits<Entity> searchHits = operations.search(Query.findAll(), Entity.class);
assertThat(searchHits.getTotalHits()).isEqualTo(1);
Entity foundEntity = searchHits.getSearchHit(0).getContent();
assertThat(foundEntity).isEqualTo(entity);
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Document(indexName = "entity-with-custom-conversions")
static class Entity {
private String value;
private GeoJsonPoint location;
}
@WritingConverter
static class EntityToMapConverter implements Converter<Entity, Map<String, Object>> {
@Override
public Map<String, Object> convert(Entity source) {
LinkedHashMap<String, Object> target = new LinkedHashMap<>();
target.put("the_value", source.getValue());
target.put("the_lat", "" + source.getLocation().getY());
target.put("the_lon", "" + source.getLocation().getX());
return target;
}
}
@ReadingConverter
static class MapToEntityConverter implements Converter<Map<String, Object>, Entity> {
@Override
public Entity convert(Map<String, Object> source) {
Entity entity = new Entity();
entity.setValue((String) source.get("the_value"));
entity.setLocation(GeoJsonPoint.of( //
Double.parseDouble((String) (source.get("the_lon"))), //
Double.parseDouble((String) (source.get("the_lat"))) //
));
return entity;
}
}
}
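For orientation, a small sketch of what the two converters defined above produce and consume; the values mirror the assertions in these tests, and the direct converter calls are purely illustrative:

Entity entity = Entity.builder()
        .value("hello")
        .location(GeoJsonPoint.of(8.0, 42.7)) // x = lon, y = lat
        .build();

// writing side: Entity -> Map with renamed keys and stringified coordinates
Map<String, Object> written = new EntityToMapConverter().convert(entity);
// written: the_value=hello, the_lat=42.7, the_lon=8.0

// reading side: Map -> Entity, restoring the original values
Entity read = new MapToEntityConverter().convert(written);
// read equals the original entity, as the store-and-load test asserts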


@ -192,20 +192,6 @@ public class SimpleElasticsearchPersistentPropertyUnitTests {
assertThat(seqNoProperty.isReadable()).isFalse();
}
@Test // DATAES-828
void shouldRequireFormatForDateField() {
assertThatExceptionOfType(MappingException.class) //
.isThrownBy(() -> context.getRequiredPersistentEntity(DateFieldWithNoFormat.class)) //
.withMessageContaining("date");
}
@Test // DATAES-828
void shouldRequireFormatForDateNanosField() {
assertThatExceptionOfType(MappingException.class) //
.isThrownBy(() -> context.getRequiredPersistentEntity(DateNanosFieldWithNoFormat.class)) //
.withMessageContaining("date");
}
@Test // DATAES-924
@DisplayName("should require pattern for custom date format")
void shouldRequirePatternForCustomDateFormat() {


@ -68,9 +68,9 @@ public class ElasticsearchRestTemplateConfiguration extends AbstractElasticsearc
}
@Override
public ElasticsearchOperations elasticsearchOperations(ElasticsearchConverter elasticsearchConverter) {
RestHighLevelClient client = elasticsearchClient();
return new ElasticsearchRestTemplate(client, elasticsearchConverter) {
public ElasticsearchOperations elasticsearchOperations(ElasticsearchConverter elasticsearchConverter,
RestHighLevelClient elasticsearchClient) {
return new ElasticsearchRestTemplate(elasticsearchClient, elasticsearchConverter) {
@Override
public <T> T execute(ClientCallback<T> callback) {
try {


@ -1,5 +1,5 @@
/*
* Copyright 2013-2020 the original author or authors.
* Copyright 2013-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -151,6 +151,7 @@ public abstract class CustomMethodRepositoryBaseTests {
// then
assertThat(page).isNotNull();
assertThat(page.getTotalElements()).isGreaterThanOrEqualTo(1L);
assertThat(page.getContent().get(0)).isInstanceOf(SampleEntity.class);
}
@Test
@ -1570,6 +1571,35 @@ public abstract class CustomMethodRepositoryBaseTests {
assertThat((nextPageable.getPageNumber())).isEqualTo(1);
}
@Test // #1811
void shouldReturnSearchPageWithQuery() {
List<SampleEntity> entities = createSampleEntities("abc", 20);
repository.saveAll(entities);
SearchPage<SampleEntity> searchPage = repository.searchWithQueryByMessage("Message", PageRequest.of(0, 10));
assertThat(searchPage).isNotNull();
SearchHits<SampleEntity> searchHits = searchPage.getSearchHits();
assertThat(searchHits).isNotNull();
assertThat((searchHits.getTotalHits())).isEqualTo(20);
assertThat(searchHits.getSearchHits()).hasSize(10);
Pageable nextPageable = searchPage.nextPageable();
assertThat((nextPageable.getPageNumber())).isEqualTo(1);
}
@Test // #1917
void shouldReturnAllDocumentsWithUnpagedQuery() {
List<SampleEntity> entities = createSampleEntities("abc", 20);
repository.saveAll(entities);
SearchHits<SampleEntity> searchHits = repository.searchWithQueryByMessageUnpaged("Message");
assertThat(searchHits).isNotNull();
assertThat((searchHits.getTotalHits())).isEqualTo(20);
assertThat(searchHits.getSearchHits()).hasSize(20);
}
private List<SampleEntity> createSampleEntities(String type, int numberOfEntities) {
List<SampleEntity> entities = new ArrayList<>();
@ -1746,6 +1776,12 @@ public abstract class CustomMethodRepositoryBaseTests {
SearchHits<SampleEntity> searchBy(Sort sort);
SearchPage<SampleEntity> searchByMessage(String message, Pageable pageable);
@Query("{\"match\": {\"message\": \"?0\"}}")
SearchPage<SampleEntity> searchWithQueryByMessage(String message, Pageable pageable);
@Query("{\"match\": {\"message\": \"?0\"}}")
SearchHits<SampleEntity> searchWithQueryByMessageUnpaged(String message);
}
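A brief usage sketch for the two annotated query methods added above, called the same way the new tests call them (createSampleEntities is the helper defined in this class):

repository.saveAll(createSampleEntities("abc", 20));

// paged variant: one page of hits plus paging metadata
SearchPage<SampleEntity> page = repository.searchWithQueryByMessage("Message", PageRequest.of(0, 10));
// 20 total hits, 10 on the first page, nextPageable() points at page 1

// unpaged variant: all matching documents in a single SearchHits
SearchHits<SampleEntity> all = repository.searchWithQueryByMessageUnpaged("Message");
// 20 total hits and 20 returned search hits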
/**


@ -0,0 +1,78 @@
/*
* Copyright 2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.elasticsearch.repository.query;
import java.util.ArrayList;
import java.util.Collection;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.convert.CustomConversions;
import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter;
import org.springframework.data.elasticsearch.core.convert.ElasticsearchCustomConversions;
import org.springframework.data.elasticsearch.core.convert.MappingElasticsearchConverter;
import org.springframework.data.elasticsearch.core.mapping.SimpleElasticsearchMappingContext;
import org.springframework.lang.Nullable;
/**
* @author Peter-Josef Meisch
*/
public class ElasticsearchStringQueryUnitTestBase {
protected ElasticsearchConverter setupConverter() {
MappingElasticsearchConverter converter = new MappingElasticsearchConverter(
new SimpleElasticsearchMappingContext());
Collection<Converter<?, ?>> converters = new ArrayList<>();
converters.add(ElasticsearchStringQueryUnitTests.CarConverter.INSTANCE);
CustomConversions customConversions = new ElasticsearchCustomConversions(converters);
converter.setConversions(customConversions);
converter.afterPropertiesSet();
return converter;
}
static class Car {
@Nullable private String name;
@Nullable private String model;
@Nullable
public String getName() {
return name;
}
public void setName(@Nullable String name) {
this.name = name;
}
@Nullable
public String getModel() {
return model;
}
public void setModel(@Nullable String model) {
this.model = model;
}
}
enum CarConverter implements Converter<Car, String> {
INSTANCE;
@Override
public String convert(ElasticsearchStringQueryUnitTests.Car car) {
return (car.getName() != null ? car.getName() : "null") + '-'
+ (car.getModel() != null ? car.getModel() : "null");
}
}
}
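This base class exists to register the CarConverter with the converter instance used by the string-query tests. A short sketch of its effect on parameter binding; the rendered value mirrors the assertions in the subclasses:

Car car = new Car();
car.setName("Toyota");
car.setModel("Prius");

// CarConverter renders a Car parameter as "name-model"
String rendered = CarConverter.INSTANCE.convert(car); // "Toyota-Prius"

// so a repository method annotated with
//   @Query("{ 'bool' : { 'must' : { 'term' : { 'car' : '?0' } } } }")
// is expected to bind ?0 as 'Toyota-Prius'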


@ -16,6 +16,7 @@
package org.springframework.data.elasticsearch.repository.query;
import static org.assertj.core.api.Assertions.*;
import static org.mockito.Mockito.*;
import lombok.AllArgsConstructor;
import lombok.Builder;
@ -24,6 +25,7 @@ import lombok.NoArgsConstructor;
import lombok.Setter;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
@ -31,6 +33,7 @@ import java.util.List;
import java.util.Map;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
@ -43,9 +46,7 @@ import org.springframework.data.elasticsearch.annotations.InnerField;
import org.springframework.data.elasticsearch.annotations.MultiField;
import org.springframework.data.elasticsearch.annotations.Query;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter;
import org.springframework.data.elasticsearch.core.convert.MappingElasticsearchConverter;
import org.springframework.data.elasticsearch.core.mapping.SimpleElasticsearchMappingContext;
import org.springframework.data.elasticsearch.core.SearchHits;
import org.springframework.data.elasticsearch.core.query.StringQuery;
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
import org.springframework.data.repository.Repository;
@ -55,16 +56,16 @@ import org.springframework.lang.Nullable;
/**
* @author Christoph Strobl
* @author Peter-Josef Meisch
* @author Niklas Herder
*/
@ExtendWith(MockitoExtension.class)
public class ElasticsearchStringQueryUnitTests {
public class ElasticsearchStringQueryUnitTests extends ElasticsearchStringQueryUnitTestBase {
@Mock ElasticsearchOperations operations;
ElasticsearchConverter converter;
@BeforeEach
public void setUp() {
converter = new MappingElasticsearchConverter(new SimpleElasticsearchMappingContext());
when(operations.getElasticsearchConverter()).thenReturn(setupConverter());
}
@Test // DATAES-552
@ -88,7 +89,53 @@ public class ElasticsearchStringQueryUnitTests {
.isEqualTo("name:(zero, eleven, one, two, three, four, five, six, seven, eight, nine, ten, eleven, zero, one)");
}
private org.springframework.data.elasticsearch.core.query.Query createQuery(String methodName, String... args)
@Test // #1790
@DisplayName("should escape Strings in query parameters")
void shouldEscapeStringsInQueryParameters() throws Exception {
org.springframework.data.elasticsearch.core.query.Query query = createQuery("findByPrefix", "hello \"Stranger\"");
assertThat(query).isInstanceOf(StringQuery.class);
assertThat(((StringQuery) query).getSource())
.isEqualTo("{\"bool\":{\"must\": [{\"match\": {\"prefix\": {\"name\" : \"hello \\\"Stranger\\\"\"}}]}}");
}
@Test // #1858
@DisplayName("should only quote String query parameters")
void shouldOnlyEscapeStringQueryParameters() throws Exception {
org.springframework.data.elasticsearch.core.query.Query query = createQuery("findByAge", Integer.valueOf(30));
assertThat(query).isInstanceOf(StringQuery.class);
assertThat(((StringQuery) query).getSource()).isEqualTo("{ 'bool' : { 'must' : { 'term' : { 'age' : 30 } } } }");
}
@Test // #1858
@DisplayName("should only quote String collection query parameters")
void shouldOnlyEscapeStringCollectionQueryParameters() throws Exception {
org.springframework.data.elasticsearch.core.query.Query query = createQuery("findByAgeIn",
new ArrayList<>(Arrays.asList(30, 35, 40)));
assertThat(query).isInstanceOf(StringQuery.class);
assertThat(((StringQuery) query).getSource())
.isEqualTo("{ 'bool' : { 'must' : { 'term' : { 'age' : [30,35,40] } } } }");
}
@Test // #1858
@DisplayName("should escape Strings in collection query parameters")
void shouldEscapeStringsInCollectionsQueryParameters() throws Exception {
final List<String> another_string = Arrays.asList("hello \"Stranger\"", "Another string");
List<String> params = new ArrayList<>(another_string);
org.springframework.data.elasticsearch.core.query.Query query = createQuery("findByNameIn", params);
assertThat(query).isInstanceOf(StringQuery.class);
assertThat(((StringQuery) query).getSource()).isEqualTo(
"{ 'bool' : { 'must' : { 'terms' : { 'name' : [\"hello \\\"Stranger\\\"\",\"Another string\"] } } } }");
}
private org.springframework.data.elasticsearch.core.query.Query createQuery(String methodName, Object... args)
throws NoSuchMethodException {
Class<?>[] argTypes = Arrays.stream(args).map(Object::getClass).toArray(Class[]::new);
@ -97,6 +144,21 @@ public class ElasticsearchStringQueryUnitTests {
return elasticsearchStringQuery.createQuery(new ElasticsearchParametersParameterAccessor(queryMethod, args));
}
@Test // #1866
@DisplayName("should use converter on parameters")
void shouldUseConverterOnParameters() throws NoSuchMethodException {
Car car = new Car();
car.setName("Toyota");
car.setModel("Prius");
org.springframework.data.elasticsearch.core.query.Query query = createQuery("findByCar", car);
assertThat(query).isInstanceOf(StringQuery.class);
assertThat(((StringQuery) query).getSource())
.isEqualTo("{ 'bool' : { 'must' : { 'term' : { 'car' : 'Toyota-Prius' } } } }");
}
private ElasticsearchStringQuery queryForMethod(ElasticsearchQueryMethod queryMethod) {
return new ElasticsearchStringQuery(queryMethod, operations, queryMethod.getAnnotatedQuery());
}
@ -105,33 +167,59 @@ public class ElasticsearchStringQueryUnitTests {
Method method = SampleRepository.class.getMethod(name, parameters);
return new ElasticsearchQueryMethod(method, new DefaultRepositoryMetadata(SampleRepository.class),
new SpelAwareProxyProjectionFactory(), converter.getMappingContext());
new SpelAwareProxyProjectionFactory(), operations.getElasticsearchConverter().getMappingContext());
}
private interface SampleRepository extends Repository<Person, String> {
@Query("{ 'bool' : { 'must' : { 'term' : { 'age' : ?0 } } } }")
List<Person> findByAge(Integer age);
@Query("{ 'bool' : { 'must' : { 'term' : { 'age' : ?0 } } } }")
List<Person> findByAgeIn(ArrayList<Integer> age);
@Query("{ 'bool' : { 'must' : { 'term' : { 'name' : '?0' } } } }")
Person findByName(String name);
@Query("{ 'bool' : { 'must' : { 'terms' : { 'name' : ?0 } } } }")
Person findByNameIn(ArrayList<String> names);
@Query(value = "name:(?0, ?11, ?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?0, ?1)")
Person findWithRepeatedPlaceholder(String arg0, String arg1, String arg2, String arg3, String arg4, String arg5,
String arg6, String arg7, String arg8, String arg9, String arg10, String arg11);
@Query("{\"bool\":{\"must\": [{\"match\": {\"prefix\": {\"name\" : \"?0\"}}]}}")
SearchHits<Book> findByPrefix(String prefix);
@Query("{ 'bool' : { 'must' : { 'term' : { 'car' : '?0' } } } }")
Person findByCar(Car car);
}
/**
* @author Rizwan Idrees
* @author Mohsin Husen
* @author Artur Konczak
* @author Niklas Herder
*/
@Document(indexName = "test-index-person-query-unittest", replicas = 0, refreshInterval = "-1")
static class Person {
@Nullable public int age;
@Nullable @Id private String id;
@Nullable private String name;
@Nullable @Field(type = FieldType.Nested) private List<Car> car;
@Nullable @Field(type = FieldType.Nested, includeInParent = true) private List<Book> books;
@Nullable
public int getAge() {
return age;
}
public void setAge(int age) {
this.age = age;
}
@Nullable
public String getId() {
return id;
@ -191,38 +279,6 @@ public class ElasticsearchStringQueryUnitTests {
searchAnalyzer = "standard") }) private String description;
}
/**
* @author Rizwan Idrees
* @author Mohsin Husen
* @author Artur Konczak
*/
@Setter
@Getter
@NoArgsConstructor
@AllArgsConstructor
@Builder
static class Car {
private String name;
private String model;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getModel() {
return model;
}
public void setModel(String model) {
this.model = model;
}
}
/**
* @author Rizwan Idrees
* @author Mohsin Husen


@ -1,5 +1,5 @@
/*
* Copyright 2019-2020 the original author or authors.
* Copyright 2019-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -16,6 +16,7 @@
package org.springframework.data.elasticsearch.repository.query;
import static org.assertj.core.api.Assertions.*;
import static org.mockito.Mockito.*;
import lombok.AllArgsConstructor;
import lombok.Builder;
@ -34,6 +35,7 @@ import java.util.Map;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
@ -46,9 +48,7 @@ import org.springframework.data.elasticsearch.annotations.InnerField;
import org.springframework.data.elasticsearch.annotations.MultiField;
import org.springframework.data.elasticsearch.annotations.Query;
import org.springframework.data.elasticsearch.core.ReactiveElasticsearchOperations;
import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter;
import org.springframework.data.elasticsearch.core.convert.MappingElasticsearchConverter;
import org.springframework.data.elasticsearch.core.mapping.SimpleElasticsearchMappingContext;
import org.springframework.data.elasticsearch.core.SearchHit;
import org.springframework.data.elasticsearch.core.query.StringQuery;
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
import org.springframework.data.repository.Repository;
@ -62,16 +62,15 @@ import org.springframework.lang.Nullable;
* @author Peter-Josef Meisch
*/
@ExtendWith(MockitoExtension.class)
public class ReactiveElasticsearchStringQueryUnitTests {
public class ReactiveElasticsearchStringQueryUnitTests extends ElasticsearchStringQueryUnitTestBase {
SpelExpressionParser PARSER = new SpelExpressionParser();
ElasticsearchConverter converter;
@Mock ReactiveElasticsearchOperations operations;
@BeforeEach
public void setUp() {
converter = new MappingElasticsearchConverter(new SimpleElasticsearchMappingContext());
when(operations.getElasticsearchConverter()).thenReturn(setupConverter());
}
@Test // DATAES-519
@ -124,7 +123,33 @@ public class ReactiveElasticsearchStringQueryUnitTests {
.isEqualTo("name:(zero, eleven, one, two, three, four, five, six, seven, eight, nine, ten, eleven, zero, one)");
}
private org.springframework.data.elasticsearch.core.query.Query createQuery(String methodName, String... args)
@Test // #1790
@DisplayName("should escape Strings in query parameters")
void shouldEscapeStringsInQueryParameters() throws Exception {
org.springframework.data.elasticsearch.core.query.Query query = createQuery("findByPrefix", "hello \"Stranger\"");
assertThat(query).isInstanceOf(StringQuery.class);
assertThat(((StringQuery) query).getSource())
.isEqualTo("{\"bool\":{\"must\": [{\"match\": {\"prefix\": {\"name\" : \"hello \\\"Stranger\\\"\"}}]}}");
}
@Test // #1866
@DisplayName("should use converter on parameters")
void shouldUseConverterOnParameters() throws Exception {
Car car = new Car();
car.setName("Toyota");
car.setModel("Prius");
org.springframework.data.elasticsearch.core.query.Query query = createQuery("findByCar", car);
assertThat(query).isInstanceOf(StringQuery.class);
assertThat(((StringQuery) query).getSource())
.isEqualTo("{ 'bool' : { 'must' : { 'term' : { 'car' : 'Toyota-Prius' } } } }");
}
private org.springframework.data.elasticsearch.core.query.Query createQuery(String methodName, Object... args)
throws NoSuchMethodException {
Class<?>[] argTypes = Arrays.stream(args).map(Object::getClass).toArray(Class[]::new);
@ -144,7 +169,7 @@ public class ReactiveElasticsearchStringQueryUnitTests {
Method method = SampleRepository.class.getMethod(name, parameters);
return new ReactiveElasticsearchQueryMethod(method, new DefaultRepositoryMetadata(SampleRepository.class),
new SpelAwareProxyProjectionFactory(), converter.getMappingContext());
new SpelAwareProxyProjectionFactory(), operations.getElasticsearchConverter().getMappingContext());
}
private ReactiveElasticsearchStringQuery createQueryForMethod(String name, Class<?>... parameters) throws Exception {
@ -168,6 +193,13 @@ public class ReactiveElasticsearchStringQueryUnitTests {
@Query(value = "name:(?0, ?11, ?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?0, ?1)")
Person findWithRepeatedPlaceholder(String arg0, String arg1, String arg2, String arg3, String arg4, String arg5,
String arg6, String arg7, String arg8, String arg9, String arg10, String arg11);
@Query("{\"bool\":{\"must\": [{\"match\": {\"prefix\": {\"name\" : \"?0\"}}]}}")
Flux<SearchHit<Book>> findByPrefix(String prefix);
@Query("{ 'bool' : { 'must' : { 'term' : { 'car' : '?0' } } } }")
Mono<Person> findByCar(Car car);
}
/**
@ -246,42 +278,6 @@ public class ReactiveElasticsearchStringQueryUnitTests {
searchAnalyzer = "standard") }) private String description;
}
/**
* @author Rizwan Idrees
* @author Mohsin Husen
* @author Artur Konczak
*/
@Setter
@Getter
@NoArgsConstructor
@AllArgsConstructor
@Builder
static class Car {
private String name;
private String model;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getModel() {
return model;
}
public void setModel(String model) {
this.model = model;
}
}
/**
* @author Rizwan Idrees
* @author Mohsin Husen
*/
static class Author {
@Nullable private String id;


@ -31,6 +31,7 @@ import java.lang.Boolean;
import java.lang.Long;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.elasticsearch.ElasticsearchStatusException;
@ -515,16 +516,38 @@ public class SimpleReactiveElasticsearchRepositoryTests {
}
@Test // DATAES-519
public void annotatedFinderMethodShouldBeExecutedCorrectly() {
void annotatedFinderMethodShouldBeExecutedCorrectly() {
bulkIndex(SampleEntity.builder().id("id-one").message("message").build(), //
SampleEntity.builder().id("id-two").message("test message").build(), //
SampleEntity.builder().id("id-three").message("test test").build()) //
.block();
int count = 30;
SampleEntity[] sampleEntities = IntStream.range(1, count + 1)
.mapToObj(i -> SampleEntity.builder().id("id-" + i).message("test " + i).build()).collect(Collectors.toList())
.toArray(new SampleEntity[count]);
bulkIndex(sampleEntities).block();
repository.findAllViaAnnotatedQueryByMessageLike("test") //
.as(StepVerifier::create) //
.expectNextCount(2) //
.expectNextCount(count) //
.verifyComplete();
}
@Test // #1917
void annotatedFinderMethodPagedShouldBeExecutedCorrectly() {
int count = 30;
SampleEntity[] sampleEntities = IntStream.range(1, count + 1)
.mapToObj(i -> SampleEntity.builder().id("id-" + i).message("test " + i).build()).collect(Collectors.toList())
.toArray(new SampleEntity[count]);
bulkIndex(sampleEntities).block();
repository.findAllViaAnnotatedQueryByMessageLikePaged("test", PageRequest.of(0, 20)) //
.as(StepVerifier::create) //
.expectNextCount(20) //
.verifyComplete();
repository.findAllViaAnnotatedQueryByMessageLikePaged("test", PageRequest.of(1, 20)) //
.as(StepVerifier::create) //
.expectNextCount(10) //
.verifyComplete();
}
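A minimal sketch of consuming such a Pageable-driven annotated query outside a StepVerifier, using the same method and page sizes as the test above (the subscription style is illustrative):

// with 30 matching documents and a page size of 20, page 1 holds the remaining 10
Flux<SampleEntity> secondPage = repository
        .findAllViaAnnotatedQueryByMessageLikePaged("test", PageRequest.of(1, 20));

secondPage.count()
        .subscribe(count -> System.out.println("hits on the second page: " + count)); // 10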
@ -572,6 +595,9 @@ public class SimpleReactiveElasticsearchRepositoryTests {
@Query("{ \"bool\" : { \"must\" : { \"term\" : { \"message\" : \"?0\" } } } }")
Flux<SampleEntity> findAllViaAnnotatedQueryByMessageLike(String message);
@Query("{ \"bool\" : { \"must\" : { \"term\" : { \"message\" : \"?0\" } } } }")
Flux<SampleEntity> findAllViaAnnotatedQueryByMessageLikePaged(String message, Pageable pageable);
Mono<SampleEntity> findFirstByMessageLike(String message);
Mono<Long> countAllByMessage(String message);