Compare commits

...

49 Commits

Author SHA1 Message Date
Mark Paluch
229fd977cd
DATAES-905 - Release version 4.0.4 (Neumann SR4). 2020-09-16 11:43:15 +02:00
Mark Paluch
c80c821c30
DATAES-905 - Prepare 4.0.4 (Neumann SR4). 2020-09-16 11:42:47 +02:00
Mark Paluch
b12431dfec
DATAES-905 - Updated changelog. 2020-09-16 11:42:44 +02:00
Mark Paluch
1120ce402b
DATAES-888 - Updated changelog. 2020-09-16 11:20:14 +02:00
Mark Paluch
e5593a07a1
DATAES-887 - Updated changelog. 2020-09-16 10:39:06 +02:00
Peter-Josef Meisch
fa0fdd8c82
DATAES-924 - Conversion of properties of collections of Temporal values fails.
Original PR: #519

(cherry picked from commit 0e7791a6875baf48217f9265837705b508d1fdc9)
2020-09-15 23:25:00 +02:00
Peter-Josef Meisch
8686650261 DATAES-912 - Derived Query with "In" Keyword does not work on Text field.
Original PR: #510

(cherry picked from commit 79fdc449b873b317cc6d9544285e870c11a4d240)
2020-08-24 07:32:33 +02:00
Mark Paluch
3a522cd432
DATAES-890 - After release cleanups. 2020-08-12 13:19:59 +02:00
Mark Paluch
0a1eec8f0b
DATAES-890 - Prepare next development iteration. 2020-08-12 13:19:56 +02:00
Mark Paluch
63a3daf20a
DATAES-890 - Release version 4.0.3 (Neumann SR3). 2020-08-12 13:07:48 +02:00
Mark Paluch
4d5638c6d7
DATAES-890 - Prepare 4.0.3 (Neumann SR3). 2020-08-12 13:07:22 +02:00
Mark Paluch
5180b2f8cd
DATAES-890 - Updated changelog. 2020-08-12 13:07:18 +02:00
Mark Paluch
8eaf09cfc4
DATAES-872 - Updated changelog. 2020-08-12 12:01:28 +02:00
Peter-Josef Meisch
383fe3132e DATAES-896 - Use mainField property of @MultiField annotation.
Original PR: #500

(cherry picked from commit fd23c10c163e1959362c078fd8fa4b812ce11c01)
2020-08-09 16:40:05 +02:00
Peter-Josef Meisch
96ce05794e DATAES-897 - Add documentation for Highlight annotation.
Original PR: #499

(cherry picked from commit fd77f62cc4d2452aee8cfce56e037e3daa18477e)
2020-08-08 20:06:40 +02:00
Peter-Josef Meisch
4f29f0d60c DATAES-891 - Returning a Stream from a Query annotated repository method crashes.
Original PR: #497

(cherry picked from commit f989cf873b0e2a5e60044ffa1af42b77b05e9012)
2020-07-29 13:07:41 +02:00
Mark Paluch
886503c41c
DATAES-862 - After release cleanups. 2020-07-22 10:37:09 +02:00
Mark Paluch
c429436f1c
DATAES-862 - Prepare next development iteration. 2020-07-22 10:37:06 +02:00
Mark Paluch
afa611ce09
DATAES-862 - Release version 4.0.2 (Neumann SR2). 2020-07-22 10:21:10 +02:00
Mark Paluch
dc9db5dcdc
DATAES-862 - Prepare 4.0.2 (Neumann SR2). 2020-07-22 10:20:45 +02:00
Mark Paluch
4ee592cd21
DATAES-862 - Updated changelog. 2020-07-22 10:20:41 +02:00
Mark Paluch
cd7b6f8420
DATAES-861 - Updated changelog. 2020-07-22 10:08:51 +02:00
Mark Paluch
237c0ead2e
DATAES-860 - Updated changelog. 2020-07-22 09:44:37 +02:00
Peter-Josef Meisch
6462305521 DATAES-883 - Fix log level on resource load error.
Original PR: #493

(cherry picked from commit 0f940b36d7a89257694ed85639f1a89c4eb2a35a)
2020-07-10 21:20:42 +02:00
Peter-Josef Meisch
0a2038505f DATAES-878 - Wrong value for TermVector.
Original PR: #492

(cherry picked from commit df4e6c449d4b5cf7a9196d88045f7b7af9060311)
2020-07-02 06:45:15 +02:00
Mark Paluch
8276023132
DATAES-824 - Updated changelog. 2020-06-25 12:00:26 +02:00
Peter-Josef Meisch
ae94120d91 DATAES-865 - Polishing.
(cherry picked from commit 92f16846abaf7266de1e9669aadd3bd24f5b64a1)
2020-06-16 18:59:16 +02:00
Been24
d2df9e7f4c DATAES-865 - Fix MappingElasticsearchConverter writing an Object property containing a Map.
Original PR: #482

(cherry picked from commit 1de1aeb2c7ec80580cb2b4b1d98b724277862463)
2020-06-16 18:59:03 +02:00
Peter-Josef Meisch
73fc8f65ee DATAES-863 - Improve server error response handling.
Original PR: #480

(cherry picked from commit 3c44a1c96996ff2af496500505a8194e22b3de02)
2020-06-11 19:16:11 +02:00
Mark Paluch
4d2e4ac22c
DATAES-823 - After release cleanups. 2020-06-10 14:29:30 +02:00
Mark Paluch
8d02946186
DATAES-823 - Prepare next development iteration. 2020-06-10 14:29:27 +02:00
Mark Paluch
3ac4e12e08
DATAES-823 - Release version 4.0.1 (Neumann SR1). 2020-06-10 14:02:28 +02:00
Mark Paluch
bb69482b7b
DATAES-823 - Prepare 4.0.1 (Neumann SR1). 2020-06-10 14:02:00 +02:00
Mark Paluch
20f3298f72
DATAES-823 - Updated changelog. 2020-06-10 14:01:56 +02:00
Mark Paluch
3178707172
DATAES-807 - Updated changelog. 2020-06-10 12:29:56 +02:00
Mark Paluch
b60da78c5b
DATAES-806 - Updated changelog. 2020-06-10 11:40:30 +02:00
Peter-Josef Meisch
8e765cf07c DATAES-857 - Registered simple types are not read from list.
Original PR: #478

(cherry picked from commit 407c8c6c17cf13dffcf0c577fe7ea47bd6f96200)
2020-06-09 16:31:14 +02:00
Peter-Josef Meisch
ff999959a8 DATAES-850 - Add warning and docs for missing TemporalAccessor configuration.
Original PR: #472

(cherry picked from commit 859b22db8e396dc533d479dcf49a590c07b8dc24)
2020-05-31 23:06:38 +02:00
Peter-Josef Meisch
333aba2c59 DATAES-845 - MappingElasticsearchConverter handles lists with null values.
Original PR: #470

(cherry picked from commit 852273eff5c06dbd9e1ef4bcd28d2736c482bdf9)
2020-05-29 19:12:24 +02:00
Mark Paluch
e3e646eb72
DATAES-844 - Improve TOC formatting for migration guides. 2020-05-26 16:23:12 +02:00
Peter-Josef Meisch
b918605efd
DATAES-839 - ReactiveElasticsearchTemplate should use RequestFactory.
Original PR: #466

cherrypicked from dc6734db4391f236aeb11600204db28fe570fb34
2020-05-21 12:32:30 +02:00
Peter-Josef Meisch
c9667755f2
DATAES-835 - Fix code sample in documentation for scroll API.
Original PR: #462
2020-05-20 08:43:03 +02:00
Peter-Josef Meisch
421333dadc DATAES-832 - findAllById repository method returns iterable with null elements for not found ids. 2020-05-18 18:05:30 +02:00
Peter-Josef Meisch
34e3dc735c DATAES-832 - findAllById repository method returns iterable with null elements for not found ids. 2020-05-17 20:01:47 +02:00
Peter-Josef Meisch
e7110c14ab DATAES-831 - SearchOperations.searchForStream does not use requested maxResults.
Original PR: #459

(cherry picked from commit 506f79a45aa93ad5787b25d807de5e5970bf0ea3)
2020-05-17 10:53:29 +02:00
Peter-Josef Meisch
1cee4057d9
DATAES-828 - Fields of type date need to have a format defined.
Original PR: #457
2020-05-14 20:30:30 +02:00
Peter-Josef Meisch
68ce0c2184 DATAES-826 - Repositories should not try to create an index when it already exists.
original PR: #456

(cherry picked from commit c7339dc248370e5e726b6a808c74bb5bd4dc1db1)
2020-05-14 18:06:51 +02:00
Mark Paluch
9adfa0b389
DATAES-808 - After release cleanups. 2020-05-12 12:50:35 +02:00
Mark Paluch
d28f643997
DATAES-808 - Prepare next development iteration. 2020-05-12 12:40:53 +02:00
47 changed files with 1367 additions and 492 deletions

10
Jenkinsfile vendored
View File

@ -3,7 +3,7 @@ pipeline {
triggers {
pollSCM 'H/10 * * * *'
upstream(upstreamProjects: "spring-data-commons/master", threshold: hudson.model.Result.SUCCESS)
upstream(upstreamProjects: "spring-data-commons/2.3.x", threshold: hudson.model.Result.SUCCESS)
}
options {
@ -15,7 +15,7 @@ pipeline {
stage("test: baseline (jdk8)") {
when {
anyOf {
branch 'master'
branch '4.0.x'
not { triggeredBy 'UpstreamCause' }
}
}
@ -36,7 +36,7 @@ pipeline {
stage("Test other configurations") {
when {
anyOf {
branch 'master'
branch '4.0.x'
not { triggeredBy 'UpstreamCause' }
}
}
@ -76,7 +76,7 @@ pipeline {
stage('Release to artifactory') {
when {
anyOf {
branch 'master'
branch '4.0.x'
not { triggeredBy 'UpstreamCause' }
}
}
@ -107,7 +107,7 @@ pipeline {
}
stage('Publish documentation') {
when {
branch 'master'
branch '4.0.x'
}
agent {
docker {

View File

@ -5,12 +5,12 @@
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-elasticsearch</artifactId>
<version>4.0.0.RELEASE</version>
<version>4.0.4.RELEASE</version>
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>2.3.0.RELEASE</version>
<version>2.3.4.RELEASE</version>
</parent>
<name>Spring Data Elasticsearch</name>
@ -21,7 +21,7 @@
<commonslang>2.6</commonslang>
<elasticsearch>7.6.2</elasticsearch>
<log4j>2.9.1</log4j>
<springdata.commons>2.3.0.RELEASE</springdata.commons>
<springdata.commons>2.3.4.RELEASE</springdata.commons>
<netty>4.1.39.Final</netty>
<java-module-name>spring.data.elasticsearch</java-module-name>
</properties>

View File

@ -44,4 +44,5 @@ include::{spring-data-commons-docs}/repository-namespace-reference.adoc[]
include::{spring-data-commons-docs}/repository-populator-namespace-reference.adoc[]
include::{spring-data-commons-docs}/repository-query-keywords-reference.adoc[]
include::{spring-data-commons-docs}/repository-query-return-types-reference.adoc[]
include::reference/migration-guides.adoc[]
:leveloffset: -1

View File

@ -9,7 +9,6 @@ The Spring Data Elasticsearch project applies core Spring concepts to the develo
You will notice similarities to the Spring data solr and mongodb support in the Spring Framework.
include::reference/elasticsearch-new.adoc[leveloffset=+1]
include::reference/elasticsearch-migration-guide-3.2-4.0.adoc[leveloffset=+1]
[[preface.metadata]]
== Project Metadata

View File

@ -1,25 +1,25 @@
[[elasticsearch-migration-guide-3.2-4.0]]
== Upgrading from 3.2.x to 4.0.x
= Upgrading from 3.2.x to 4.0.x
This section describes breaking changes from version 3.2.x to 4.0.x and how removed features can be replaced by new introduced features.
=== Removal of the used Jackson Mapper.
[[elasticsearch-migration-guide-3.2-4.0.jackson-removal]]
== Removal of the used Jackson Mapper
One of the changes in version 4.0.x is that Spring Data Elasticsearch does not use the Jackson Mapper anymore to map an entity to the JSON representation needed for Elasticsearch (see <<elasticsearch.mapping>>). In version 3.2.x the Jackson Mapper was the default that was used. It was possible to switch to the meta-model based converter (named `ElasticsearchEntityMapper`) by explicitly configuring it (<<elasticsearch.mapping.meta-model>>).
In version 4.0.x the meta-model based converter is the only one that is available and does not need to be configured explicitly. If you had a custom configuration to enable the meta-model converter by providing a bean like this:
[code,java]
[source,java]
----
@Bean
@Override
public EntityMapper entityMapper() {
public EntityMapper entityMapper() {
ElasticsearchEntityMapper entityMapper = new ElasticsearchEntityMapper(
elasticsearchMappingContext(), new DefaultConversionService()
elasticsearchMappingContext(), new DefaultConversionService()
);
entityMapper.setConversions(elasticsearchCustomConversions());
entityMapper.setConversions(elasticsearchCustomConversions());
return entityMapper;
}
@ -30,15 +30,15 @@ You now have to remove this bean, the `ElasticsearchEntityMapper` interface has
.Entity configuration
Some users had custom Jackson annotations on the entity class, for example in order to define a custom name for the mapped document in Elasticsearch or to configure date conversions. These are not taken into account anymore. The needed functionality is now provided with Spring Data Elasticsearch's `@Field` annotation. Please see <<elasticsearch.mapping.meta-model.annotations>> for detailed information.
=== Removal of implicit index name from query objects
[[elasticsearch-migration-guide-3.2-4.0.implicit-index-name]]
== Removal of implicit index name from query objects
In 3.2.x the different query classes like `IndexQuery` or `SearchQuery` had properties that were taking the index name or index names that they were operating upon. If these were not set, the passed in entity was inspected to retrieve the index name that was set in the `@Document` annotation. +
In 4.0.x the index name(s) must now be provided in an additional parameter of type `IndexCoordinates`. By separating this, it now is possible to use one query object against different indices.
So for example the following code:
[code,java]
[source,java]
----
IndexQuery indexQuery = new IndexQueryBuilder()
.withId(person.getId().toString())
@ -50,7 +50,7 @@ String documentId = elasticsearchOperations.index(indexQuery);
must be changed to:
[code,java]
[source,java]
----
IndexCoordinates indexCoordinates = elasticsearchOperations.getIndexCoordinatesFor(person.getClass());
@ -58,14 +58,14 @@ IndexQuery indexQuery = new IndexQueryBuilder()
.withId(person.getId().toString())
.withObject(person)
.build();
String documentId = elasticsearchOperations.index(indexQuery, indexCoordinates);
----
To make it easier to work with entities and use the index name that is contained in the entity's `@Document` annotation, new methods have been added like `DocumentOperations.save(T entity)`;
=== The new Operations interfaces
[[elasticsearch-migration-guide-3.2-4.0.new-operations]]
== The new Operations interfaces
In version 3.2 there was the `ElasticsearchOperations` interface that defined all the methods for the `ElasticsearchTemplate` class. In version 4 the functions have been split into different interfaces, aligning these interfaces with the Elasticsearch API:
@ -77,10 +77,10 @@ In version 3.2 there was the `ElasticsearchOperations` interface that defined al
NOTE: All the functions from the `ElasticsearchOperations` interface in version 3.2 that are now moved to the `IndexOperations` interface are still available, they are marked as deprecated and have default implementations that delegate to the new implementation:
[code,java]
[source,java]
----
/**
* Create an index for given indexName .
* Create an index for given indexName.
*
* @param indexName the name of the index
* @return {@literal true} if the index was created
@ -92,17 +92,17 @@ default boolean createIndex(String indexName) {
}
----
[[elasticsearch-migration-guide-3.2-4.0.deprecations]]
== Deprecations
=== Deprecations
==== Methods and classes
=== Methods and classes
Many functions and classes have been deprecated. These functions still work, but the Javadocs show with what they should be replaced.
.Example from ElasticsearchOperations
[code,java]
[source,java]
----
/**
/*
* Retrieves an object from an index.
*
* @param query the query defining the id of the object to get
@ -113,15 +113,16 @@ Many functions and classes have been deprecated. These functions still work, but
@Deprecated
@Nullable
<T> T queryForObject(GetQuery query, Class<T> clazz);
----
----
==== Elasticsearch deprecations
=== Elasticsearch deprecations
Since version 7 the Elasticsearch `TransportClient` is deprecated, it will be removed with Elasticsearch version 8. Spring Data Elasticsearch deprecates the `ElasticsearchTemplate` class which uses the `TransportClient` in version 4.0.
Mapping types were removed from Elasticsearch 7, they still exist as deprecated values in the Spring Data `@Document` annotation and the `IndexCoordinates` class but they are not used anymore internally.
=== Removals
[[elasticsearch-migration-guide-3.2-4.0.removal]]
== Removals
* As already described, the `ElasticsearchEntityMapper` interface has been removed.
@ -130,4 +131,3 @@ Mapping types were removed from Elasticsearch 7, they still exist as deprecated
* The method `org.springframework.data.elasticsearch.core.ElasticsearchOperations.query(SearchQuery query, ResultsExtractor<T> resultsExtractor);` and the `org.springframework.data.elasticsearch.core.ResultsExtractor` interface have been removed. These could be used to parse the result from Elasticsearch for cases in which the response mapping done with the Jackson based mapper was not enough. Since version 4.0, there are the new <<elasticsearch.operations.searchresulttypes>> to return the information from an Elasticsearch response, so there is no need to expose this low level functionality.
* The low level methods `startScroll`, `continueScroll` and `clearScroll` have been removed from the `ElasticsearchOperations` interface. For low level scroll API access, there now are `searchScrollStart`, `searchScrollContinue` and `searchScrollClear` methods on the `ElasticsearchRestTemplate` class.

View File

@ -35,8 +35,6 @@ IndexCoordinates index = IndexCoordinates.of("sample-index");
SearchQuery searchQuery = new NativeSearchQueryBuilder()
.withQuery(matchAllQuery())
.withIndices(INDEX_NAME)
.withTypes(TYPE_NAME)
.withFields("message")
.withPageable(PageRequest.of(0, 10))
.build();
@ -62,8 +60,6 @@ IndexCoordinates index = IndexCoordinates.of("sample-index");
SearchQuery searchQuery = new NativeSearchQueryBuilder()
.withQuery(matchAllQuery())
.withIndices(INDEX_NAME)
.withTypes(TYPE_NAME)
.withFields("message")
.withPageable(PageRequest.of(0, 10))
.build();

View File

@ -43,11 +43,14 @@ The following annotations are available:
* `@Field`: Applied at the field level and defines properties of the field, most of the attributes map to the respective https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping.html[Elasticsearch Mapping] definitions (the following list is not complete, check the annotation Javadoc for a complete reference):
** `name`: The name of the field as it will be represented in the Elasticsearch document, if not set, the Java field name is used.
** `type`: the field type, can be one of _Text, Keyword, Long, Integer, Short, Byte, Double, Float, Half_Float, Scaled_Float, Date, Date_Nanos, Boolean, Binary, Integer_Range, Float_Range, Long_Range, Double_Range, Date_Range, Ip_Range, Object, Nested, Ip, TokenCount, Percolator, Flattened, Search_As_You_Type_. See https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-types.html[Elasticsearch Mapping Types]
** `format` and `pattern` custom definitions for the _Date_ type.
** `format` and `pattern` definitions for the _Date_ type. `format` must be defined for date types.
** `store`: Flag whether the original field value should be stored in Elasticsearch, default value is _false_.
** `analyzer`, `searchAnalyzer`, `normalizer` for specifying custom analyzers and normalizer.
* `@GeoPoint`: marks a field as _geo_point_ datatype. Can be omitted if the field is an instance of the `GeoPoint` class.
NOTE: Properties that derive from `TemporalAccessor` must either have a `@Field` annotation of type `FieldType.Date` or a custom converter must be registered for this type. +
If you are using a custom date format, you need to use _uuuu_ for the year instead of _yyyy_. This is due to a https://www.elastic.co/guide/en/elasticsearch/reference/current/migrate-to-java-time.html#java-time-migration-incompatible-date-formats[change in Elasticsearch 7].
The mapping metadata infrastructure is defined in a separate spring-data-commons project that is technology agnostic.
[[elasticsearch.mapping.meta-model.rules]]

View File

@ -3,8 +3,58 @@
This chapter includes details of the Elasticsearch repository implementation.
.The sample `Book` entity
====
[source,java]
----
@Document(indexName="books")
class Book {
@Id
private String id;
@Field(type = FieldType.text)
private String name;
@Field(type = FieldType.text)
private String summary;
@Field(type = FieldType.Integer)
private Integer price;
// getter/setter ...
}
----
====
include::elasticsearch-repository-queries.adoc[leveloffset=+1]
include::reactive-elasticsearch-repositories.adoc[leveloffset=+1]
[[elasticsearch.repositories.annotations]]
== Annotations for repository methods
=== @Highlight
The `@Highlight` annotation on a repository method defines for which fields of the returned entity highlighting should be included. To search for some text in a `Book` 's name or summary and have the found data highlighted, the following repository method can be used:
====
[source,java]
----
interface BookRepository extends Repository<Book, String> {
@Highlight(fields = {
@HighlightField(name = "name"),
@HighlightField(name = "summary")
})
List<SearchHit<Book>> findByNameOrSummary(String text, String summary);
}
----
====
It is possible to define multiple fields to be highlighted like above, and both the `@Highlight` and the `@HighlightField` annotation can further be customized with a `@HighlightParameters` annotation. Check the Javadocs for the possible configuration options.
In the search results the highlight data can be retrieved from the `SearchHit` class.
[[elasticsearch.annotation]]
== Annotation based configuration
@ -40,7 +90,8 @@ class ProductService {
}
----
<1> The `EnableElasticsearchRepositories` annotation activates the Repository support. If no base package is configured, it will use the one of the configuration class it is put on.
<1> The `EnableElasticsearchRepositories` annotation activates the Repository support.
If no base package is configured, it will use the one of the configuration class it is put on.
<2> Provide a Bean named `elasticsearchTemplate` of type `ElasticsearchOperations` by using one of the configurations shown in the <<elasticsearch.operations>> chapter.
<3> Let Spring inject the Repository bean into your class.
====
@ -145,5 +196,3 @@ Using the `Transport Client` or `Rest Client` element registers an instance of `
</beans>
----
====
include::reactive-elasticsearch-repositories.adoc[leveloffset=+1]

View File

@ -48,7 +48,9 @@ A list of supported keywords for Elasticsearch is shown below.
|===
| Keyword
| Sample
| Elasticsearch Query String| `And`
| Elasticsearch Query String
| `And`
| `findByNameAndPrice`
| `{ "query" : {
"bool" : {
@ -201,7 +203,7 @@ A list of supported keywords for Elasticsearch is shown below.
}
}}`
| `In`
| `In` (when annotated as FieldType.Keyword)
| `findByNameIn(Collection<String>names)`
| `{ "query" : {
"bool" : {
@ -215,7 +217,12 @@ A list of supported keywords for Elasticsearch is shown below.
}
}}`
| `NotIn`
| `In`
| `findByNameIn(Collection<String>names)`
| `{ "query": {"bool": {"must": [{"query_string":{"query": "\"?\" \"?\"", "fields": ["name"]}}]}}}`
| `NotIn` (when annotated as FieldType.Keyword)
| `findByNameNotIn(Collection<String>names)`
| `{ "query" : {
"bool" : {
@ -229,6 +236,10 @@ A list of supported keywords for Elasticsearch is shown below.
}
}}`
| `NotIn`
| `findByNameNotIn(Collection<String>names)`
| `{"query": {"bool": {"must": [{"query_string": {"query": "NOT(\"?\" \"?\")", "fields": ["name"]}}]}}}`
| `Near`
| `findByStoreNear`
| `Not Supported Yet !`

View File

@ -0,0 +1,6 @@
[[elasticsearch.migration]]
= Appendix E: Migration Guides
:leveloffset: +1
include::elasticsearch-migration-guide-3.2-4.0.adoc[]
:leveloffset: -1

View File

@ -0,0 +1,37 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.elasticsearch;
import org.springframework.dao.DataRetrievalFailureException;
import java.util.Map;
/**
* @author Peter-Josef Meisch
* @since 4.0.1 (ported back from master (4.1) branch)
*/
public class BulkFailureException extends DataRetrievalFailureException {
private final Map<String, String> failedDocuments;
public BulkFailureException(String msg, Map<String, String> failedDocuments) {
super(msg);
this.failedDocuments = failedDocuments;
}
public Map<String, String> getFailedDocuments() {
return failedDocuments;
}
}

View File

@ -22,6 +22,7 @@ import org.springframework.dao.UncategorizedDataAccessException;
* @since 4.0
*/
public class UncategorizedElasticsearchException extends UncategorizedDataAccessException {
public UncategorizedElasticsearchException(String msg, Throwable cause) {
super(msg, cause);
}

View File

@ -29,7 +29,7 @@ import java.lang.annotation.Target;
* @author Aleksei Arsenev
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
@Target(ElementType.ANNOTATION_TYPE)
public @interface InnerField {
String suffix();

View File

@ -20,5 +20,5 @@ package org.springframework.data.elasticsearch.annotations;
* @since 4.0
*/
public enum TermVector {
none, no, yes, with_positions, with_offsets, woth_positions_offsets, with_positions_payloads, with_positions_offets_payloads
none, no, yes, with_positions, with_offsets, with_positions_offsets, with_positions_payloads, with_positions_offets_payloads
}

View File

@ -804,53 +804,82 @@ public class DefaultReactiveElasticsearchClient implements ReactiveElasticsearch
private <T> Publisher<? extends T> handleServerError(Request request, ClientResponse response) {
RestStatus status = RestStatus.fromCode(response.statusCode().value());
int statusCode = response.statusCode().value();
RestStatus status = RestStatus.fromCode(statusCode);
String mediaType = response.headers().contentType().map(MediaType::toString).orElse(XContentType.JSON.mediaType());
return Mono.error(new ElasticsearchStatusException(String.format("%s request to %s returned error code %s.",
request.getMethod(), request.getEndpoint(), response.statusCode().value()), status));
return response.body(BodyExtractors.toMono(byte[].class)) //
.map(bytes -> new String(bytes, StandardCharsets.UTF_8)) //
.flatMap(content -> contentOrError(content, mediaType, status))
.flatMap(unused -> Mono
.error(new ElasticsearchStatusException(String.format("%s request to %s returned error code %s.",
request.getMethod(), request.getEndpoint(), statusCode), status)));
}
private <T> Publisher<? extends T> handleClientError(String logId, Request request, ClientResponse response,
Class<T> responseType) {
int statusCode = response.statusCode().value();
RestStatus status = RestStatus.fromCode(statusCode);
String mediaType = response.headers().contentType().map(MediaType::toString).orElse(XContentType.JSON.mediaType());
return response.body(BodyExtractors.toMono(byte[].class)) //
.map(bytes -> new String(bytes, StandardCharsets.UTF_8)) //
.flatMap(content -> {
String mediaType = response.headers().contentType().map(MediaType::toString)
.orElse(XContentType.JSON.mediaType());
RestStatus status = RestStatus.fromCode(response.statusCode().value());
try {
ElasticsearchException exception = getElasticsearchException(response, content, mediaType);
if (exception != null) {
StringBuilder sb = new StringBuilder();
buildExceptionMessages(sb, exception);
return Mono.error(new ElasticsearchStatusException(sb.toString(), status, exception));
}
} catch (Exception e) {
return Mono.error(new ElasticsearchStatusException(content, status));
}
return Mono.just(content);
}).doOnNext(it -> ClientLogger.logResponse(logId, response.statusCode(), it)) //
.flatMap(content -> contentOrError(content, mediaType, status)) //
.doOnNext(content -> ClientLogger.logResponse(logId, response.statusCode(), content)) //
.flatMap(content -> doDecode(response, responseType, content));
}
// region ElasticsearchException helper
/**
* checks if the given content body contains an {@link ElasticsearchException}, if yes it is returned in a Mono.error.
* Otherwise the content is returned in the Mono
*
* @param content the content to analyze
* @param mediaType the returned media type
* @param status the response status
* @return a Mono with the content or an Mono.error
*/
private static Mono<String> contentOrError(String content, String mediaType, RestStatus status) {
ElasticsearchException exception = getElasticsearchException(content, mediaType, status);
if (exception != null) {
StringBuilder sb = new StringBuilder();
buildExceptionMessages(sb, exception);
return Mono.error(new ElasticsearchStatusException(sb.toString(), status, exception));
}
return Mono.just(content);
}
/**
* tries to parse an {@link ElasticsearchException} from the given body content
*
* @param content the content to analyse
* @param mediaType the type of the body content
* @return an {@link ElasticsearchException} or {@literal null}.
*/
@Nullable
private ElasticsearchException getElasticsearchException(ClientResponse response, String content, String mediaType)
throws IOException {
private static ElasticsearchException getElasticsearchException(String content, String mediaType, RestStatus status) {
XContentParser parser = createParser(mediaType, content);
// we have a JSON object with an error and a status field
XContentParser.Token token = parser.nextToken(); // Skip START_OBJECT
try {
XContentParser parser = createParser(mediaType, content);
// we have a JSON object with an error and a status field
XContentParser.Token token = parser.nextToken(); // Skip START_OBJECT
do {
token = parser.nextToken();
do {
token = parser.nextToken();
if (parser.currentName().equals("error")) {
return ElasticsearchException.failureFromXContent(parser);
}
} while (token == XContentParser.Token.FIELD_NAME);
return null;
if (parser.currentName().equals("error")) {
return ElasticsearchException.failureFromXContent(parser);
}
} while (token == XContentParser.Token.FIELD_NAME);
return null;
} catch (IOException e) {
return new ElasticsearchStatusException(content, status);
}
}
private static void buildExceptionMessages(StringBuilder sb, Throwable t) {

View File

@ -36,7 +36,7 @@ import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.data.convert.EntityReader;
import org.springframework.data.elasticsearch.ElasticsearchException;
import org.springframework.data.elasticsearch.BulkFailureException;
import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter;
import org.springframework.data.elasticsearch.core.convert.MappingElasticsearchConverter;
import org.springframework.data.elasticsearch.core.document.Document;
@ -258,7 +258,11 @@ public abstract class AbstractElasticsearchTemplate implements ElasticsearchOper
long scrollTimeInMillis = TimeValue.timeValueMinutes(1).millis();
// noinspection ConstantConditions
int maxCount = query.isLimiting() ? query.getMaxResults() : 0;
return StreamQueries.streamResults( //
maxCount, //
searchScrollStart(scrollTimeInMillis, query, clazz, index), //
scrollId -> searchScrollContinue(scrollId, scrollTimeInMillis, clazz, index), //
this::searchScrollClear);
@ -401,7 +405,7 @@ public abstract class AbstractElasticsearchTemplate implements ElasticsearchOper
if (item.isFailed())
failedDocuments.put(item.getId(), item.getFailureMessage());
}
throw new ElasticsearchException(
throw new BulkFailureException(
"Bulk operation has failures. Use ElasticsearchException.getFailedDocuments() for detailed messages ["
+ failedDocuments + ']',
failedDocuments);

View File

@ -29,7 +29,9 @@ import org.apache.lucene.queryparser.flexible.core.util.StringUtils;
import org.apache.lucene.queryparser.flexible.standard.QueryParserUtil;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.springframework.data.elasticsearch.annotations.FieldType;
import org.springframework.data.elasticsearch.core.query.Criteria;
import org.springframework.data.elasticsearch.core.query.Field;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
@ -118,18 +120,19 @@ class CriteriaQueryProcessor {
Iterator<Criteria.CriteriaEntry> it = chainedCriteria.getQueryCriteriaEntries().iterator();
boolean singeEntryCriteria = (chainedCriteria.getQueryCriteriaEntries().size() == 1);
String fieldName = chainedCriteria.getField().getName();
Field field = chainedCriteria.getField();
String fieldName = field.getName();
Assert.notNull(fieldName, "Unknown field");
QueryBuilder query = null;
if (singeEntryCriteria) {
Criteria.CriteriaEntry entry = it.next();
query = processCriteriaEntry(entry, fieldName);
query = processCriteriaEntry(entry, field);
} else {
query = boolQuery();
while (it.hasNext()) {
Criteria.CriteriaEntry entry = it.next();
((BoolQueryBuilder) query).must(processCriteriaEntry(entry, fieldName));
((BoolQueryBuilder) query).must(processCriteriaEntry(entry, field));
}
}
@ -138,7 +141,11 @@ class CriteriaQueryProcessor {
}
@Nullable
private QueryBuilder processCriteriaEntry(Criteria.CriteriaEntry entry, String fieldName) {
private QueryBuilder processCriteriaEntry(Criteria.CriteriaEntry entry, Field field) {
String fieldName = field.getName();
boolean isKeywordField = FieldType.Keyword == field.getFieldType();
OperationKey key = entry.getKey();
Object value = entry.getValue();
@ -191,10 +198,24 @@ class CriteriaQueryProcessor {
query = fuzzyQuery(fieldName, searchText);
break;
case IN:
query = boolQuery().must(termsQuery(fieldName, toStringList((Iterable<Object>) value)));
if (value instanceof Iterable) {
Iterable<?> iterable = (Iterable<?>) value;
if (isKeywordField) {
query = boolQuery().must(termsQuery(fieldName, toStringList(iterable)));
} else {
query = queryStringQuery(orQueryString(iterable)).field(fieldName);
}
}
break;
case NOT_IN:
query = boolQuery().mustNot(termsQuery(fieldName, toStringList((Iterable<Object>) value)));
if (value instanceof Iterable) {
Iterable<?> iterable = (Iterable<?>) value;
if (isKeywordField) {
query = boolQuery().mustNot(termsQuery(fieldName, toStringList(iterable)));
} else {
query = queryStringQuery("NOT(" + orQueryString(iterable) + ')').field(fieldName);
}
}
break;
}
return query;
@ -208,6 +229,25 @@ class CriteriaQueryProcessor {
return list;
}
/**
 * Builds a query_string expression from the given values: each non-null element is escaped with
 * {@link QueryParserUtil#escape(String)}, wrapped in double quotes and the quoted terms are joined
 * with a single space (the query_string default operator then ORs them).
 *
 * @param iterable the values to combine, null elements are skipped
 * @return the space-separated quoted terms, empty if there are no non-null elements
 */
private static String orQueryString(Iterable<?> iterable) {

    StringBuilder queryString = new StringBuilder();
    boolean first = true;

    for (Object item : iterable) {

        if (item == null) {
            continue;
        }

        if (!first) {
            queryString.append(' ');
        }
        first = false;

        queryString.append('"') //
                .append(QueryParserUtil.escape(item.toString())) //
                .append('"');
    }

    return queryString.toString();
}
private void addBoost(QueryBuilder query, float boost) {
if (Float.isNaN(boost)) {
return;

View File

@ -40,6 +40,8 @@ import org.elasticsearch.index.reindex.DeleteByQueryRequest;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter;
import org.springframework.data.elasticsearch.core.document.DocumentAdapters;
import org.springframework.data.elasticsearch.core.document.SearchDocumentResponse;
@ -88,6 +90,8 @@ import org.springframework.util.Assert;
*/
public class ElasticsearchRestTemplate extends AbstractElasticsearchTemplate {
private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchRestTemplate.class);
private RestHighLevelClient client;
private ElasticsearchExceptionTranslator exceptionTranslator;
@ -206,7 +210,7 @@ public class ElasticsearchRestTemplate extends AbstractElasticsearchTemplate {
Assert.notNull(id, "id must not be null");
Assert.notNull(index, "index must not be null");
DeleteRequest request = new DeleteRequest(index.getIndexName(), elasticsearchConverter.convertId(id));
DeleteRequest request = requestFactory.deleteRequest(elasticsearchConverter.convertId(id), index);
return execute(client -> client.delete(request, RequestOptions.DEFAULT).getId());
}
@ -300,9 +304,13 @@ public class ElasticsearchRestTemplate extends AbstractElasticsearchTemplate {
@Override
public void searchScrollClear(List<String> scrollIds) {
ClearScrollRequest request = new ClearScrollRequest();
request.scrollIds(scrollIds);
execute(client -> client.clearScroll(request, RequestOptions.DEFAULT));
try {
ClearScrollRequest request = new ClearScrollRequest();
request.scrollIds(scrollIds);
execute(client -> client.clearScroll(request, RequestOptions.DEFAULT));
} catch (Exception e) {
LOGGER.warn("Could not clear scroll: {}", e.getMessage());
}
}
@Override

View File

@ -86,6 +86,7 @@ import org.springframework.util.Assert;
public class ElasticsearchTemplate extends AbstractElasticsearchTemplate {
private static final Logger QUERY_LOGGER = LoggerFactory
.getLogger("org.springframework.data.elasticsearch.core.QUERY");
private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchTemplate.class);
private Client client;
@Nullable private String searchTimeout;
@ -322,7 +323,11 @@ public class ElasticsearchTemplate extends AbstractElasticsearchTemplate {
@Override
public void searchScrollClear(List<String> scrollIds) {
client.prepareClearScroll().setScrollIds(scrollIds).execute().actionGet();
try {
client.prepareClearScroll().setScrollIds(scrollIds).execute().actionGet();
} catch (Exception e) {
LOGGER.warn("Could not clear scroll: {}", e.getMessage());
}
}
@Override

View File

@ -15,15 +15,11 @@
*/
package org.springframework.data.elasticsearch.core;
import static org.elasticsearch.index.VersionType.*;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.util.function.Tuple2;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -41,19 +37,10 @@ import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
import org.elasticsearch.client.Requests;
import org.elasticsearch.client.core.CountRequest;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.WrapperQueryBuilder;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.index.reindex.DeleteByQueryRequest;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.reactivestreams.Publisher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -61,9 +48,9 @@ import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.data.convert.EntityReader;
import org.springframework.data.domain.Sort;
import org.springframework.data.elasticsearch.ElasticsearchException;
import org.springframework.data.elasticsearch.BulkFailureException;
import org.springframework.data.elasticsearch.NoSuchIndexException;
import org.springframework.data.elasticsearch.UncategorizedElasticsearchException;
import org.springframework.data.elasticsearch.client.reactive.ReactiveElasticsearchClient;
import org.springframework.data.elasticsearch.core.EntityOperations.AdaptibleEntity;
import org.springframework.data.elasticsearch.core.EntityOperations.Entity;
@ -82,10 +69,8 @@ import org.springframework.data.elasticsearch.core.mapping.SimpleElasticsearchMa
import org.springframework.data.elasticsearch.core.query.BulkOptions;
import org.springframework.data.elasticsearch.core.query.CriteriaQuery;
import org.springframework.data.elasticsearch.core.query.IndexQuery;
import org.springframework.data.elasticsearch.core.query.NativeSearchQuery;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.data.elasticsearch.core.query.SeqNoPrimaryTerm;
import org.springframework.data.elasticsearch.core.query.StringQuery;
import org.springframework.data.elasticsearch.core.query.UpdateQuery;
import org.springframework.data.elasticsearch.support.VersionInfo;
import org.springframework.data.mapping.callback.ReactiveEntityCallbacks;
@ -194,6 +179,7 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
T savedEntity = it.getT1();
IndexResponse indexResponse = it.getT2();
AdaptibleEntity<T> adaptableEntity = operations.forEntity(savedEntity, converter.getConversionService());
// noinspection ReactiveStreamsNullableInLambdaInTransform
return adaptableEntity.populateIdIfNecessary(indexResponse.getId());
}).flatMap(saved -> maybeCallAfterSave(saved, index));
}
@ -268,7 +254,8 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
protected Flux<BulkItemResponse> doBulkOperation(List<?> queries, BulkOptions bulkOptions, IndexCoordinates index) {
BulkRequest bulkRequest = prepareWriteRequest(requestFactory.bulkRequest(queries, bulkOptions, index));
return client.bulk(bulkRequest) //
.onErrorMap(e -> new ElasticsearchException("Error while bulk for request: " + bulkRequest.toString(), e)) //
.onErrorMap(
e -> new UncategorizedElasticsearchException("Error while bulk for request: " + bulkRequest.toString(), e)) //
.flatMap(this::checkForBulkOperationFailure) //
.flatMapMany(response -> Flux.fromArray(response.getItems()));
}
@ -283,7 +270,7 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
failedDocuments.put(item.getId(), item.getFailureMessage());
}
}
ElasticsearchException exception = new ElasticsearchException(
BulkFailureException exception = new BulkFailureException(
"Bulk operation has failures. Use ElasticsearchException.getFailedDocuments() for detailed messages ["
+ failedDocuments + ']',
failedDocuments);
@ -315,9 +302,8 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
return doExists(id, index);
}
private Mono<Boolean> doExists(String id, @Nullable IndexCoordinates index) {
return Mono.defer(() -> doExists(new GetRequest(index.getIndexName(), id)));
private Mono<Boolean> doExists(String id, IndexCoordinates index) {
return Mono.defer(() -> doExists(requestFactory.getRequest(id, index)));
}
/**
@ -334,27 +320,30 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
private <T> Mono<Tuple2<T, IndexResponse>> doIndex(T entity, IndexCoordinates index) {
AdaptibleEntity<?> adaptibleEntity = operations.forEntity(entity, converter.getConversionService());
IndexRequest request = getIndexRequest(entity, adaptibleEntity, index);
IndexRequest request = requestFactory.indexRequest(getIndexQuery(entity), index);
request = prepareIndexRequest(entity, request);
return Mono.just(entity).zipWith(doIndex(request));
}
private IndexRequest getIndexRequest(Object value, AdaptibleEntity<?> entity, IndexCoordinates index) {
private IndexQuery getIndexQuery(Object value) {
AdaptibleEntity<?> entity = operations.forEntity(value, converter.getConversionService());
Object id = entity.getId();
IndexQuery query = new IndexQuery();
IndexRequest request = id != null ? new IndexRequest(index.getIndexName()).id(converter.convertId(id))
: new IndexRequest(index.getIndexName());
request.source(converter.mapObject(value).toJson(), Requests.INDEX_CONTENT_TYPE);
if (id != null) {
query.setId(id.toString());
}
query.setObject(value);
boolean usingSeqNo = false;
if (entity.hasSeqNoPrimaryTerm()) {
SeqNoPrimaryTerm seqNoPrimaryTerm = entity.getSeqNoPrimaryTerm();
if (seqNoPrimaryTerm != null) {
request.setIfSeqNo(seqNoPrimaryTerm.getSequenceNumber());
request.setIfPrimaryTerm(seqNoPrimaryTerm.getPrimaryTerm());
query.setSeqNo(seqNoPrimaryTerm.getSequenceNumber());
query.setPrimaryTerm(seqNoPrimaryTerm.getPrimaryTerm());
usingSeqNo = true;
}
}
@ -364,32 +353,11 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
Number version = entity.getVersion();
if (version != null) {
request.version(version.longValue());
request.versionType(EXTERNAL);
}
}
return request;
}
private IndexQuery getIndexQuery(Object value) {
AdaptibleEntity<?> entity = operations.forEntity(value, converter.getConversionService());
Object id = entity.getId();
IndexQuery query = new IndexQuery();
if (id != null) {
query.setId(id.toString());
}
query.setObject(value);
if (entity.isVersionedEntity()) {
Number version = entity.getVersion();
if (version != null) {
query.setVersion(version.longValue());
}
}
return query;
}
@ -410,9 +378,7 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
}
private Mono<GetResult> doGet(String id, ElasticsearchPersistentEntity<?> entity, IndexCoordinates index) {
return Mono.defer(() -> {
return doGet(new GetRequest(index.getIndexName(), id));
});
return Mono.defer(() -> doGet(requestFactory.getRequest(id, index)));
}
/**
@ -465,8 +431,8 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
private Mono<String> doDeleteById(String id, IndexCoordinates index) {
return Mono.defer(() -> {
return doDelete(prepareDeleteRequest(new DeleteRequest(index.getIndexName(), id)));
DeleteRequest request = requestFactory.deleteRequest(id, index);
return doDelete(prepareDeleteRequest(request));
});
}
@ -479,8 +445,7 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
Assert.notNull(query, "Query must not be null!");
return doDeleteBy(query, getPersistentEntityFor(entityType), index).map(BulkByScrollResponse::getDeleted)
.publishNext();
return doDeleteBy(query, entityType, index).map(BulkByScrollResponse::getDeleted).publishNext();
}
@Override
@ -488,13 +453,10 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
return delete(query, entityType, getIndexCoordinatesFor(entityType));
}
private Flux<BulkByScrollResponse> doDeleteBy(Query query, ElasticsearchPersistentEntity<?> entity,
IndexCoordinates index) {
private Flux<BulkByScrollResponse> doDeleteBy(Query query, Class<?> entityType, IndexCoordinates index) {
return Flux.defer(() -> {
DeleteByQueryRequest request = new DeleteByQueryRequest(index.getIndexNames());
request.setQuery(mappedQuery(query, entity));
DeleteByQueryRequest request = requestFactory.deleteByQueryRequest(query, entityType, index);
return doDeleteBy(prepareDeleteByRequest(request));
});
}
@ -552,8 +514,13 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
*/
protected DeleteByQueryRequest prepareDeleteByRequest(DeleteByQueryRequest request) {
if (refreshPolicy != null && !RefreshPolicy.NONE.equals(refreshPolicy)) {
request = request.setRefresh(true);
if (refreshPolicy != null) {
if (RefreshPolicy.NONE.equals(refreshPolicy)) {
request = request.setRefresh(false);
} else {
request = request.setRefresh(true);
}
}
if (indicesOptions != null) {
@ -661,43 +628,6 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
});
}
/**
 * Builds a {@link CountRequest} for the given query, transferring the mapped query, post filter,
 * source filter, collapse settings, sort, min score, indices options and preference from the
 * Spring Data {@link Query} onto the request's {@link SearchSourceBuilder}.
 *
 * @param query the query to derive the count request from
 * @param entity the persistent entity used to map criteria field names
 * @param index the index coordinates whose names the request targets
 * @return the prepared count request
 */
private CountRequest buildCountRequest(Query query, ElasticsearchPersistentEntity<?> entity, IndexCoordinates index) {
CountRequest request = new CountRequest(index.getIndexNames());
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.query(mappedQuery(query, entity));
searchSourceBuilder.trackScores(query.getTrackScores());
// a post filter is only present on NativeSearchQuery instances (see mappedFilterQuery)
QueryBuilder postFilterQuery = mappedFilterQuery(query, entity);
if (postFilterQuery != null) {
searchSourceBuilder.postFilter(postFilterQuery);
}
if (query.getSourceFilter() != null) {
searchSourceBuilder.fetchSource(query.getSourceFilter().getIncludes(), query.getSourceFilter().getExcludes());
}
// collapse is a NativeSearchQuery-only feature
if (query instanceof NativeSearchQuery && ((NativeSearchQuery) query).getCollapseBuilder() != null) {
searchSourceBuilder.collapse(((NativeSearchQuery) query).getCollapseBuilder());
}
sort(query, entity).forEach(searchSourceBuilder::sort);
if (query.getMinScore() > 0) {
searchSourceBuilder.minScore(query.getMinScore());
}
if (query.getIndicesOptions() != null) {
request.indicesOptions(query.getIndicesOptions());
}
if (query.getPreference() != null) {
request.preference(query.getPreference());
}
request.source(searchSourceBuilder);
return request;
}
/**
* Customization hook on the actual execution result {@link Publisher}. <br />
*
@ -762,61 +692,6 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
.map(DocumentAdapters::from).onErrorResume(NoSuchIndexException.class, it -> Mono.empty());
}
/**
 * Extracts the filter to use as post filter from the given query.
 *
 * @param query the query to inspect
 * @param entity the targeted entity, currently not used for filter mapping
 * @return the filter of a {@link NativeSearchQuery}, {@literal null} for all other query types
 */
@Nullable
private QueryBuilder mappedFilterQuery(Query query, ElasticsearchPersistentEntity<?> entity) {
    return query instanceof NativeSearchQuery ? ((NativeSearchQuery) query).getFilter() : null;
}
/**
 * Maps a Spring Data {@link Query} onto an Elasticsearch {@link QueryBuilder}, falling back to a
 * match-all query when the concrete query carries no query builder of its own.
 *
 * @param query the query to map; must be a {@link CriteriaQuery}, {@link StringQuery} or
 *          {@link NativeSearchQuery}
 * @param entity the entity the query targets, used for field name mapping of criteria queries
 * @return the mapped query, never {@literal null}
 * @throws IllegalArgumentException if the query type is not supported
 */
private QueryBuilder mappedQuery(Query query, ElasticsearchPersistentEntity<?> entity) {

    QueryBuilder mapped;

    if (query instanceof CriteriaQuery) {
        CriteriaQuery criteriaQuery = (CriteriaQuery) query;
        // field names in the criteria must be mapped to the entity's field names first
        converter.updateQuery(criteriaQuery, entity.getType());
        mapped = new CriteriaQueryProcessor().createQueryFromCriteria(criteriaQuery.getCriteria());
    } else if (query instanceof StringQuery) {
        mapped = new WrapperQueryBuilder(((StringQuery) query).getSource());
    } else if (query instanceof NativeSearchQuery) {
        mapped = ((NativeSearchQuery) query).getQuery();
    } else {
        throw new IllegalArgumentException(String.format("Unknown query type '%s'.", query.getClass()));
    }

    return mapped != null ? mapped : QueryBuilders.matchAllQuery();
}
/**
 * Maps the {@link Sort} of the given query to Elasticsearch {@link FieldSortBuilder}s, translating
 * property names to field names where the entity knows the property and mapping null handling to
 * the Elasticsearch "_first"/"_last" missing values.
 *
 * @param query the query whose sort definition is mapped
 * @param entity the entity used to resolve property names to field names
 * @return the mapped sort builders, empty if the query is unsorted
 */
private static List<FieldSortBuilder> sort(Query query, ElasticsearchPersistentEntity<?> entity) {

    if (query.getSort() == null || query.getSort().isUnsorted()) {
        return Collections.emptyList();
    }

    List<FieldSortBuilder> sortBuilders = new ArrayList<>();

    for (Sort.Order order : query.getSort()) {

        ElasticsearchPersistentProperty property = entity.getPersistentProperty(order.getProperty());
        // fall back to the raw property name if the entity does not know the property
        String fieldName = property != null ? property.getFieldName() : order.getProperty();

        FieldSortBuilder sortBuilder = SortBuilders.fieldSort(fieldName)
                .order(order.getDirection().isDescending() ? SortOrder.DESC : SortOrder.ASC);

        switch (order.getNullHandling()) {
            case NULLS_FIRST:
                sortBuilder.missing("_first");
                break;
            case NULLS_LAST:
                sortBuilder.missing("_last");
                break;
            default:
                break;
        }

        sortBuilders.add(sortBuilder);
    }

    return sortBuilders;
}
/**
* Customization hook to modify a generated {@link SearchRequest} prior to its execution. Eg. by setting the
* {@link SearchRequest#indicesOptions(IndicesOptions) indices options} if applicable.
@ -950,7 +825,6 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
return Mono.just(entity);
}
// endregion
protected interface DocumentCallback<T> {

View File

@ -29,6 +29,7 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequestBuilder;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetRequestBuilder;
import org.elasticsearch.action.get.MultiGetRequest;
@ -249,6 +250,10 @@ class RequestFactory {
return deleteByQueryRequest;
}
/**
 * Creates a {@link DeleteRequest} for the given document id in the given index.
 *
 * @param id the id of the document to delete
 * @param index the index coordinates; only the single index name is used
 * @return the created request
 */
public DeleteRequest deleteRequest(String id, IndexCoordinates index) {
    String indexName = index.getIndexName();
    return new DeleteRequest(indexName, id);
}
@Deprecated
public DeleteByQueryRequestBuilder deleteByQueryRequestBuilder(Client client, DeleteQuery deleteQuery,
IndexCoordinates index) {
@ -344,6 +349,7 @@ class RequestFactory {
throw new ElasticsearchException(
"object or source is null, failed to index the document [id: " + query.getId() + ']');
}
if (query.getVersion() != null) {
indexRequest.version(query.getVersion());
VersionType versionType = retrieveVersionTypeFromPersistentEntity(query.getObject().getClass());
@ -353,6 +359,7 @@ class RequestFactory {
if (query.getSeqNo() != null) {
indexRequest.setIfSeqNo(query.getSeqNo());
}
if (query.getPrimaryTerm() != null) {
indexRequest.setIfPrimaryTerm(query.getPrimaryTerm());
}

View File

@ -38,8 +38,8 @@ public abstract class ResourceUtil {
/**
* Read a {@link ClassPathResource} into a {@link String}.
*
* @param url
* @return
* @param url the file url
* @return the contents of the file or null if it could not be read
*/
@Nullable
public static String readFileFromClasspath(String url) {
@ -48,7 +48,7 @@ public abstract class ResourceUtil {
try (InputStream is = classPathResource.getInputStream()) {
return StreamUtils.copyToString(is, Charset.defaultCharset());
} catch (Exception e) {
LOGGER.debug(String.format("Failed to load file from url: %s: %s", url, e.getMessage()));
LOGGER.warn(String.format("Failed to load file from url: %s: %s", url, e.getMessage()));
return null;
}
}

View File

@ -18,6 +18,7 @@ package org.springframework.data.elasticsearch.core;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import java.util.function.Function;
@ -38,13 +39,15 @@ abstract class StreamQueries {
/**
* Stream query results using {@link SearchScrollHits}.
*
* @param maxCount the maximum number of entities to return, a value of 0 means that all available entities are
* returned
* @param searchHits the initial hits
* @param continueScrollFunction function to continue scrolling applies to the current scrollId.
* @param clearScrollConsumer consumer to clear the scroll context by accepting the scrollIds to clear.
* @param <T>
* @param <T> the entity type
* @return the {@link SearchHitsIterator}.
*/
static <T> SearchHitsIterator<T> streamResults(SearchScrollHits<T> searchHits,
static <T> SearchHitsIterator<T> streamResults(int maxCount, SearchScrollHits<T> searchHits,
Function<String, SearchScrollHits<T>> continueScrollFunction, Consumer<List<String>> clearScrollConsumer) {
Assert.notNull(searchHits, "searchHits must not be null.");
@ -59,20 +62,14 @@ abstract class StreamQueries {
return new SearchHitsIterator<T>() {
// As we couldn't retrieve single result with scroll, store current hits.
private volatile Iterator<SearchHit<T>> scrollHits = searchHits.iterator();
private volatile boolean continueScroll = scrollHits.hasNext();
private volatile AtomicInteger currentCount = new AtomicInteger();
private volatile Iterator<SearchHit<T>> currentScrollHits = searchHits.iterator();
private volatile boolean continueScroll = currentScrollHits.hasNext();
private volatile ScrollState scrollState = new ScrollState(searchHits.getScrollId());
@Override
public void close() {
try {
clearScrollConsumer.accept(scrollState.getScrollIds());
} finally {
scrollHits = null;
scrollState = null;
}
clearScrollConsumer.accept(scrollState.getScrollIds());
}
@Override
@ -99,24 +96,25 @@ abstract class StreamQueries {
@Override
public boolean hasNext() {
if (!continueScroll) {
if (!continueScroll || (maxCount > 0 && currentCount.get() >= maxCount)) {
return false;
}
if (!scrollHits.hasNext()) {
if (!currentScrollHits.hasNext()) {
SearchScrollHits<T> nextPage = continueScrollFunction.apply(scrollState.getScrollId());
scrollHits = nextPage.iterator();
currentScrollHits = nextPage.iterator();
scrollState.updateScrollId(nextPage.getScrollId());
continueScroll = scrollHits.hasNext();
continueScroll = currentScrollHits.hasNext();
}
return scrollHits.hasNext();
return currentScrollHits.hasNext();
}
@Override
public SearchHit<T> next() {
if (hasNext()) {
return scrollHits.next();
currentCount.incrementAndGet();
return currentScrollHits.next();
}
throw new NoSuchElementException();
}

View File

@ -15,18 +15,14 @@
*/
package org.springframework.data.elasticsearch.core.convert;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.time.temporal.TemporalAccessor;
import java.util.*;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.ApplicationContext;
@ -44,6 +40,7 @@ import org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersiste
import org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersistentProperty;
import org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersistentPropertyConverter;
import org.springframework.data.elasticsearch.core.query.CriteriaQuery;
import org.springframework.data.elasticsearch.core.query.Field;
import org.springframework.data.elasticsearch.core.query.SeqNoPrimaryTerm;
import org.springframework.data.mapping.PersistentPropertyAccessor;
import org.springframework.data.mapping.context.MappingContext;
@ -77,6 +74,8 @@ import org.springframework.util.ObjectUtils;
public class MappingElasticsearchConverter
implements ElasticsearchConverter, ApplicationContextAware, InitializingBean {
private static final Logger LOGGER = LoggerFactory.getLogger(MappingElasticsearchConverter.class);
private final MappingContext<? extends ElasticsearchPersistentEntity<?>, ElasticsearchPersistentProperty> mappingContext;
private final GenericConversionService conversionService;
@ -85,6 +84,8 @@ public class MappingElasticsearchConverter
private ElasticsearchTypeMapper typeMapper;
private ConcurrentHashMap<String, Integer> propertyWarnings = new ConcurrentHashMap<>();
public MappingElasticsearchConverter(
MappingContext<? extends ElasticsearchPersistentEntity<?>, ElasticsearchPersistentProperty> mappingContext) {
this(mappingContext, null);
@ -267,11 +268,26 @@ public class MappingElasticsearchConverter
return null;
}
if (property.hasPropertyConverter() && String.class.isAssignableFrom(source.getClass())) {
source = property.getPropertyConverter().read((String) source);
Class<R> rawType = targetType.getType();
if (property.hasPropertyConverter()) {
source = propertyConverterRead(property, source);
} else if (TemporalAccessor.class.isAssignableFrom(property.getType())
&& !conversions.hasCustomReadTarget(source.getClass(), rawType)) {
// log at most 5 times
String propertyName = property.getOwner().getType().getSimpleName() + '.' + property.getName();
String key = propertyName + "-read";
int count = propertyWarnings.computeIfAbsent(key, k -> 0);
if (count < 5) {
LOGGER.warn(
"Type {} of property {} is a TemporalAccessor class but has neither a @Field annotation defining the date type nor a registered converter for reading!"
+ " It cannot be mapped from a complex object in Elasticsearch!",
property.getType().getSimpleName(), propertyName);
propertyWarnings.put(key, count + 1);
}
}
Class<R> rawType = targetType.getType();
if (conversions.hasCustomReadTarget(source.getClass(), rawType)) {
return rawType.cast(conversionService.convert(source, rawType));
} else if (source instanceof List) {
@ -283,6 +299,32 @@ public class MappingElasticsearchConverter
return (R) readSimpleValue(source, targetType);
}
/**
 * Applies the property's converter to the source value when reading. A {@code String[]} is first
 * turned into a {@link List}; for {@link List} and {@link Set} sources the converter is applied
 * element-wise (retaining the collection kind), otherwise it is applied to the value itself.
 *
 * @param property the property whose converter is used; the converter must be present
 * @param source the raw value read from the document
 * @return the converted value
 */
private Object propertyConverterRead(ElasticsearchPersistentProperty property, Object source) {

    ElasticsearchPersistentPropertyConverter converter = Objects.requireNonNull(property.getPropertyConverter());

    // normalize String arrays to a List so the element-wise branch below handles them
    Object input = source instanceof String[] ? Arrays.asList((String[]) source) : source;

    if (input instanceof List) {
        return ((List<?>) input).stream().map(element -> convertOnRead(converter, element))
                .collect(Collectors.toList());
    }

    if (input instanceof Set) {
        return ((Set<?>) input).stream().map(element -> convertOnRead(converter, element))
                .collect(Collectors.toSet());
    }

    return convertOnRead(converter, input);
}
/**
 * Converts a single value on read: String values are passed through the converter, any other type
 * is returned unchanged.
 *
 * @param propertyConverter the converter to apply
 * @param source the value to convert, must not be {@literal null}
 * @return the converted value or the original {@code source}
 */
private Object convertOnRead(ElasticsearchPersistentPropertyConverter propertyConverter, Object source) {
    boolean isStringSource = String.class.isAssignableFrom(source.getClass());
    return isStringSource ? propertyConverter.read((String) source) : source;
}
@SuppressWarnings("unchecked")
@Nullable
private <R> R readCollectionValue(@Nullable List<?> source, ElasticsearchPersistentProperty property,
@ -293,14 +335,17 @@ public class MappingElasticsearchConverter
}
Collection<Object> target = createCollectionForValue(targetType, source.size());
TypeInformation<?> componentType = targetType.getComponentType();
for (Object value : source) {
if (value == null) {
target.add(null);
} else if (componentType != null && !ClassTypeInformation.OBJECT.equals(componentType)
&& isSimpleType(componentType.getType())) {
target.add(readSimpleValue(value, componentType));
} else if (isSimpleType(value)) {
target.add(
readSimpleValue(value, targetType.getComponentType() != null ? targetType.getComponentType() : targetType));
target.add(readSimpleValue(value, componentType != null ? componentType : targetType));
} else {
if (value instanceof List) {
@ -471,8 +516,21 @@ public class MappingElasticsearchConverter
}
if (property.hasPropertyConverter()) {
ElasticsearchPersistentPropertyConverter propertyConverter = property.getPropertyConverter();
value = propertyConverter.write(value);
value = propertyConverterWrite(property, value);
} else if (TemporalAccessor.class.isAssignableFrom(property.getActualType())
&& !conversions.hasCustomWriteTarget(value.getClass())) {
// log at most 5 times
String propertyName = entity.getType().getSimpleName() + '.' + property.getName();
String key = propertyName + "-write";
int count = propertyWarnings.computeIfAbsent(key, k -> 0);
if (count < 5) {
LOGGER.warn(
"Type {} of property {} is a TemporalAccessor class but has neither a @Field annotation defining the date type nor a registered converter for writing!"
+ " It will be mapped to a complex object in Elasticsearch!",
property.getType().getSimpleName(), propertyName);
propertyWarnings.put(key, count + 1);
}
}
if (!isSimpleType(value)) {
@ -486,6 +544,20 @@ public class MappingElasticsearchConverter
}
}
/**
 * Applies the property's converter to the value when writing. For {@link List} and {@link Set}
 * values the converter is applied element-wise (retaining the collection kind), otherwise it is
 * applied to the value itself.
 *
 * @param property the property whose converter is used; the converter must be present
 * @param value the value to write
 * @return the converted value
 */
private Object propertyConverterWrite(ElasticsearchPersistentProperty property, Object value) {

    ElasticsearchPersistentPropertyConverter converter = Objects.requireNonNull(property.getPropertyConverter());

    if (value instanceof List) {
        return ((List<?>) value).stream().map(converter::write).collect(Collectors.toList());
    }

    if (value instanceof Set) {
        return ((Set<?>) value).stream().map(converter::write).collect(Collectors.toSet());
    }

    return converter.write(value);
}
protected void writeProperty(ElasticsearchPersistentProperty property, Object value, MapValueAccessor sink) {
Optional<Class<?>> customWriteTarget = conversions.getCustomWriteTarget(value.getClass());
@ -556,7 +628,9 @@ public class MappingElasticsearchConverter
Map<Object, Object> target = new LinkedHashMap<>();
Streamable<Entry<String, Object>> mapSource = Streamable.of(value.entrySet());
if (!typeHint.getActualType().getType().equals(Object.class)
TypeInformation<?> actualType = typeHint.getActualType();
if (actualType != null && !actualType.getType().equals(Object.class)
&& isSimpleType(typeHint.getMapValueType().getType())) {
mapSource.forEach(it -> {
@ -595,8 +669,14 @@ public class MappingElasticsearchConverter
: Streamable.of(ObjectUtils.toObjectArray(value));
List<Object> target = new ArrayList<>();
if (!typeHint.getActualType().getType().equals(Object.class) && isSimpleType(typeHint.getActualType().getType())) {
collectionSource.map(this::getWriteSimpleValue).forEach(target::add);
TypeInformation<?> actualType = typeHint.getActualType();
Class<?> type = actualType != null ? actualType.getType() : null;
if (type != null && !type.equals(Object.class) && isSimpleType(type)) {
// noinspection ReturnOfNull
collectionSource //
.map(element -> element != null ? getWriteSimpleValue(element) : null) //
.forEach(target::add);
} else {
collectionSource.map(it -> {
@ -670,10 +750,6 @@ public class MappingElasticsearchConverter
/**
* Compute the type to use by checking the given entity against the store type;
*
* @param entity
* @param source
* @return
*/
private ElasticsearchPersistentEntity<?> computeClosestEntity(ElasticsearchPersistentEntity<?> entity,
Map<String, Object> source) {
@ -709,11 +785,12 @@ public class MappingElasticsearchConverter
if (persistentEntity != null) {
criteriaQuery.getCriteria().getCriteriaChain().forEach(criteria -> {
String name = criteria.getField().getName();
Field field = criteria.getField();
String name = field.getName();
ElasticsearchPersistentProperty property = persistentEntity.getPersistentProperty(name);
if (property != null && property.getName().equals(name)) {
criteria.getField().setName(property.getFieldName());
field.setName(property.getFieldName());
if (property.hasPropertyConverter()) {
ElasticsearchPersistentPropertyConverter propertyConverter = property.getPropertyConverter();
@ -729,6 +806,13 @@ public class MappingElasticsearchConverter
}
});
}
org.springframework.data.elasticsearch.annotations.Field fieldAnnotation = property.findAnnotation(org.springframework.data.elasticsearch.annotations.Field.class);
if (fieldAnnotation != null) {
field.setFieldType(fieldAnnotation.type());
}
}
});
}

View File

@ -17,6 +17,8 @@ package org.springframework.data.elasticsearch.core.geo;
import org.springframework.data.geo.Point;
import java.util.Objects;
/**
* geo-location used for {@link org.springframework.data.elasticsearch.core.query.Criteria}.
*
@ -60,6 +62,20 @@ public class GeoPoint {
return new Point(point.getLat(), point.getLon());
}
@Override
public boolean equals(Object o) {
	// identity short-circuit
	if (o == this) {
		return true;
	}
	// strict class check: instances of subclasses are never equal to a GeoPoint
	if (o == null || o.getClass() != getClass()) {
		return false;
	}
	GeoPoint other = (GeoPoint) o;
	// Double.compare gives a NaN-safe, bitwise-consistent comparison of the coordinates
	return Double.compare(other.lat, lat) == 0 && Double.compare(other.lon, lon) == 0;
}
@Override
public int hashCode() {
// hash derived from both coordinates, consistent with equals() which compares lat and lon
return Objects.hash(lat, lon);
}
@Override
public String toString() {
return "GeoPoint{" +

View File

@ -137,8 +137,8 @@ public class MappingBuilder {
}
}
private void mapEntity(XContentBuilder builder, @Nullable ElasticsearchPersistentEntity entity, boolean isRootObject,
String nestedObjectFieldName, boolean nestedOrObjectField, FieldType fieldType,
private void mapEntity(XContentBuilder builder, @Nullable ElasticsearchPersistentEntity<?> entity,
boolean isRootObject, String nestedObjectFieldName, boolean nestedOrObjectField, FieldType fieldType,
@Nullable Field parentFieldAnnotation, @Nullable DynamicMapping dynamicMapping) throws IOException {
boolean writeNestedProperties = !isRootObject && (isAnyPropertyAnnotatedWithField(entity) || nestedOrObjectField);
@ -150,7 +150,7 @@ public class MappingBuilder {
if (nestedOrObjectField && FieldType.Nested == fieldType && parentFieldAnnotation != null
&& parentFieldAnnotation.includeInParent()) {
builder.field("include_in_parent", parentFieldAnnotation.includeInParent());
builder.field("include_in_parent", true);
}
}
@ -366,7 +366,7 @@ public class MappingBuilder {
MappingParameters mappingParameters = MappingParameters.from(annotation);
if (!nestedOrObjectField && mappingParameters.isStore()) {
builder.field(FIELD_PARAM_STORE, mappingParameters.isStore());
builder.field(FIELD_PARAM_STORE, true);
}
mappingParameters.writeTypeAndParametersTo(builder);
}

View File

@ -23,6 +23,7 @@ import java.util.List;
import org.springframework.data.elasticsearch.annotations.DateFormat;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;
import org.springframework.data.elasticsearch.annotations.MultiField;
import org.springframework.data.elasticsearch.annotations.Parent;
import org.springframework.data.elasticsearch.annotations.Score;
import org.springframework.data.elasticsearch.core.convert.ElasticsearchDateConverter;
@ -83,6 +84,10 @@ public class SimpleElasticsearchPersistentProperty extends
throw new MappingException(String.format("Parent property %s must be of type String!", property.getName()));
}
if (isAnnotationPresent(Field.class) && isAnnotationPresent(MultiField.class)) {
throw new MappingException("@Field annotation must not be used on a @MultiField property.");
}
initDateConverter();
}
@ -114,60 +119,76 @@ public class SimpleElasticsearchPersistentProperty extends
*/
private void initDateConverter() {
Field field = findAnnotation(Field.class);
boolean isTemporalAccessor = TemporalAccessor.class.isAssignableFrom(getType());
boolean isDate = Date.class.isAssignableFrom(getType());
Class<?> actualType = getActualType();
boolean isTemporalAccessor = TemporalAccessor.class.isAssignableFrom(actualType);
boolean isDate = Date.class.isAssignableFrom(actualType);
if (field != null && field.type() == FieldType.Date && (isTemporalAccessor || isDate)) {
if (field != null && (field.type() == FieldType.Date || field.type() == FieldType.Date_Nanos)
&& (isTemporalAccessor || isDate)) {
DateFormat dateFormat = field.format();
ElasticsearchDateConverter converter = null;
if (dateFormat == DateFormat.none) {
throw new MappingException(
String.format("Property %s is annotated with FieldType.%s but has no DateFormat defined",
getOwner().getType().getSimpleName() + "." + getName(), field.type().name()));
}
ElasticsearchDateConverter converter;
if (dateFormat == DateFormat.custom) {
String pattern = field.pattern();
if (StringUtils.hasLength(pattern)) {
converter = ElasticsearchDateConverter.of(pattern);
if (!StringUtils.hasLength(pattern)) {
throw new MappingException(
String.format("Property %s is annotated with FieldType.%s and a custom format but has no pattern defined",
getOwner().getType().getSimpleName() + "." + getName(), field.type().name()));
}
} else if (dateFormat != DateFormat.none) {
converter = ElasticsearchDateConverter.of(pattern);
} else {
converter = ElasticsearchDateConverter.of(dateFormat);
}
if (converter != null) {
ElasticsearchDateConverter dateConverter = converter;
propertyConverter = new ElasticsearchPersistentPropertyConverter() {
@Override
public String write(Object property) {
if (isTemporalAccessor) {
return dateConverter.format((TemporalAccessor) property);
} else { // must be Date
return dateConverter.format((Date) property);
}
}
propertyConverter = new ElasticsearchPersistentPropertyConverter() {
final ElasticsearchDateConverter dateConverter = converter;
@SuppressWarnings("unchecked")
@Override
public Object read(String s) {
if (isTemporalAccessor) {
return dateConverter.parse(s, (Class<? extends TemporalAccessor>) getType());
} else { // must be date
return dateConverter.parse(s);
}
@Override
public String write(Object property) {
if (isTemporalAccessor && TemporalAccessor.class.isAssignableFrom(property.getClass())) {
return dateConverter.format((TemporalAccessor) property);
} else if (isDate && Date.class.isAssignableFrom(property.getClass())) {
return dateConverter.format((Date) property);
} else {
return property.toString();
}
};
}
}
@SuppressWarnings("unchecked")
@Override
public Object read(String s) {
if (isTemporalAccessor) {
return dateConverter.parse(s, (Class<? extends TemporalAccessor>) actualType);
} else { // must be date
return dateConverter.parse(s);
}
}
};
}
}
@SuppressWarnings("ConstantConditions")
@Nullable
private String getAnnotatedFieldName() {
if (isAnnotationPresent(Field.class)) {
String name = null;
String name = findAnnotation(Field.class).name();
return StringUtils.hasText(name) ? name : null;
if (isAnnotationPresent(Field.class)) {
name = findAnnotation(Field.class).name();
} else if (isAnnotationPresent(MultiField.class)) {
name = findAnnotation(MultiField.class).mainField().name();
}
return null;
return StringUtils.hasText(name) ? name : null;
}
/*

View File

@ -15,6 +15,9 @@
*/
package org.springframework.data.elasticsearch.core.query;
import org.springframework.data.elasticsearch.annotations.FieldType;
import org.springframework.lang.Nullable;
/**
* Defines a Field that can be used within a Criteria.
*
@ -27,4 +30,15 @@ public interface Field {
void setName(String name);
String getName();
/**
* @param fieldType sets the field's type
*/
void setFieldType(FieldType fieldType);
/**
* @return The annotated FieldType of the field
*/
@Nullable
FieldType getFieldType();
}

View File

@ -15,10 +15,13 @@
*/
package org.springframework.data.elasticsearch.core.query;
import org.springframework.data.elasticsearch.annotations.FieldType;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
/**
* The most trivial implementation of a Field
* The most trivial implementation of a Field. The {@link #name} is updatable, so it may be changed during query
* preparation by the {@link org.springframework.data.elasticsearch.core.convert.MappingElasticsearchConverter}.
*
* @author Rizwan Idrees
* @author Mohsin Husen
@ -27,27 +30,41 @@ import org.springframework.util.Assert;
public class SimpleField implements Field {
private String name;
@Nullable private FieldType fieldType;
public SimpleField(String name) {
Assert.notNull(name, "name must not be null");
Assert.hasText(name, "name must not be null");
this.name = name;
}
@Override
public void setName(String name) {
Assert.notNull(name, "name must not be null");
Assert.hasText(name, "name must not be null");
this.name = name;
}
@Override
public String getName() {
return this.name;
return name;
}
@Override
public void setFieldType(FieldType fieldType) {
this.fieldType = fieldType;
}
@Nullable
@Override
public FieldType getFieldType() {
return fieldType;
}
@Override
public String toString() {
return this.name;
return getName();
}
}

View File

@ -28,11 +28,12 @@ import org.springframework.data.repository.query.RepositoryQuery;
public abstract class AbstractElasticsearchRepositoryQuery implements RepositoryQuery {
protected static final int DEFAULT_STREAM_BATCH_SIZE = 500;
protected ElasticsearchQueryMethod queryMethod;
protected ElasticsearchOperations elasticsearchOperations;
public AbstractElasticsearchRepositoryQuery(ElasticsearchQueryMethod queryMethod,
ElasticsearchOperations elasticsearchOperations) {
ElasticsearchOperations elasticsearchOperations) {
this.queryMethod = queryMethod;
this.elasticsearchOperations = elasticsearchOperations;
}

View File

@ -43,8 +43,6 @@ import org.springframework.util.ClassUtils;
*/
public class ElasticsearchPartQuery extends AbstractElasticsearchRepositoryQuery {
private static final int DEFAULT_STREAM_BATCH_SIZE = 500;
private final PartTree tree;
private final ElasticsearchConverter elasticsearchConverter;
private final MappingContext<?, ElasticsearchPersistentProperty> mappingContext;

View File

@ -19,6 +19,7 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.core.SearchHitSupport;
import org.springframework.data.elasticsearch.core.SearchHits;
@ -26,6 +27,7 @@ import org.springframework.data.elasticsearch.core.convert.DateTimeConverters;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.StringQuery;
import org.springframework.data.repository.query.ParametersParameterAccessor;
import org.springframework.data.util.StreamUtils;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.NumberUtils;
@ -88,6 +90,13 @@ public class ElasticsearchStringQuery extends AbstractElasticsearchRepositoryQue
stringQuery.setPageable(accessor.getPageable());
SearchHits<?> searchHits = elasticsearchOperations.search(stringQuery, clazz, index);
result = SearchHitSupport.page(searchHits, stringQuery.getPageable());
} else if (queryMethod.isStreamQuery()) {
if (accessor.getPageable().isUnpaged()) {
stringQuery.setPageable(PageRequest.of(0, DEFAULT_STREAM_BATCH_SIZE));
} else {
stringQuery.setPageable(accessor.getPageable());
}
result = StreamUtils.createStreamFromIterator(elasticsearchOperations.searchForStream(stringQuery, clazz, index));
} else if (queryMethod.isCollectionQuery()) {
if (accessor.getPageable().isPaged()) {
stringQuery.setPageable(accessor.getPageable());

View File

@ -22,6 +22,7 @@ import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;
@ -88,7 +89,7 @@ public abstract class AbstractElasticsearchRepository<T, ID> implements Elastics
this.entityClass = this.entityInformation.getJavaType();
this.indexOperations = operations.indexOps(this.entityClass);
try {
if (createIndexAndMapping()) {
if (createIndexAndMapping() && !indexOperations.exists()) {
createIndex();
putMapping();
}
@ -153,9 +154,21 @@ public abstract class AbstractElasticsearchRepository<T, ID> implements Elastics
@Override
public Iterable<T> findAllById(Iterable<ID> ids) {
Assert.notNull(ids, "ids can't be null.");
NativeSearchQuery query = new NativeSearchQueryBuilder().withIds(stringIdsRepresentation(ids)).build();
return operations.multiGet(query, getEntityClass(), getIndexCoordinates());
List<T> result = new ArrayList<>();
List<T> multiGetEntities = operations.multiGet(query, getEntityClass(), getIndexCoordinates());
multiGetEntities.forEach(entity -> {
if (entity != null) {
result.add(entity);
}
});
return result;
}
@Override

View File

@ -1,6 +1,150 @@
Spring Data Elasticsearch Changelog
===================================
Changes in version 4.0.4.RELEASE (2020-09-16)
---------------------------------------------
* DATAES-924 - Conversion of properties of collections of Temporal values fails.
* DATAES-912 - Derived Query with "In" Keyword does not work on Text field.
* DATAES-905 - Release 4.0.4 (Neumann SR4).
Changes in version 3.2.10.RELEASE (2020-09-16)
----------------------------------------------
* DATAES-903 - Update to Elasticsearch 6.8.12.
* DATAES-892 - Fix ElasticsearchEntityMapper recursive descent when reading Map objects.
* DATAES-888 - Release 3.2.10 (Moore SR10).
Changes in version 3.1.20.RELEASE (2020-09-16)
----------------------------------------------
* DATAES-887 - Release 3.1.20 (Lovelace SR20).
Changes in version 4.0.3.RELEASE (2020-08-12)
---------------------------------------------
* DATAES-897 - Add documentation for Highlight annotation.
* DATAES-896 - Use mainField property of @MultiField annotation instead of additional @Field annotation.
* DATAES-891 - Returning a Stream from a Query annotated repository method crashes.
* DATAES-890 - Release 4.0.3 (Neumann SR3).
Changes in version 4.1.0-M2 (2020-08-12)
----------------------------------------
* DATAES-901 - Operations deleting an entity should use a routing deducted from the entity.
* DATAES-899 - Add documentation for join-type.
* DATAES-897 - Add documentation for Highlight annotation.
* DATAES-896 - Use mainField property of @MultiField annotation instead of additional @Field annotation.
* DATAES-894 - Adapt to changes in Reactor.
* DATAES-893 - Adopt to changed module layout of Reactor Netty.
* DATAES-891 - Returning a Stream from a Query annotated repository method crashes.
* DATAES-886 - Complete reactive auditing.
* DATAES-883 - Fix log level on resource load error.
* DATAES-878 - Wrong value for TermVector(with_positions_offsets).
* DATAES-877 - Update test logging dependency.
* DATAES-876 - Add seqno and primary term to entity on initial save.
* DATAES-875 - MappingElasticsearchConverter.updateQuery not called at all places.
* DATAES-874 - Deprecate parent-id related methods and fields.
* DATAES-872 - Release 4.1 M2 (2020.0.0).
* DATAES-869 - Update to Elasticsearch 7.8.
* DATAES-864 - Rework alias management.
* DATAES-842 - Documentation fixes.
* DATAES-612 - Add support for index templates.
* DATAES-433 - Replace parent-child mappings to join field.
* DATAES-321 - Support time base rolling indices.
* DATAES-244 - Support alias renaming.
* DATAES-233 - Support for rolling index strategy.
* DATAES-207 - Allow fetching indices by alias.
* DATAES-192 - Define alias for document.
* DATAES-150 - mapping are not created when entity is saved in new dynamic name index (spel).
Changes in version 4.0.2.RELEASE (2020-07-22)
---------------------------------------------
* DATAES-883 - Fix log level on resource load error.
* DATAES-878 - Wrong value for TermVector(with_positions_offsets).
* DATAES-865 - Fix MappingElasticsearchConverter writing an Object property containing a Map.
* DATAES-863 - Improve server error response handling.
* DATAES-862 - Release 4.0.2 (Neumann SR2).
Changes in version 3.2.9.RELEASE (2020-07-22)
---------------------------------------------
* DATAES-861 - Release 3.2.9 (Moore SR9).
Changes in version 3.1.19.RELEASE (2020-07-22)
----------------------------------------------
* DATAES-860 - Release 3.1.19 (Lovelace SR19).
Changes in version 4.1.0-M1 (2020-06-25)
----------------------------------------
* DATAES-870 - Workaround for reactor-netty error.
* DATAES-868 - Upgrade to Netty 4.1.50.Final.
* DATAES-867 - Adopt to changes in Reactor Netty 1.0.
* DATAES-866 - Implement suggest search in reactive client.
* DATAES-865 - Fix MappingElasticsearchConverter writing an Object property containing a Map.
* DATAES-863 - Improve server error response handling.
* DATAES-859 - Don't use randomNumeric() in tests.
* DATAES-858 - Use standard Spring code of conduct.
* DATAES-857 - Registered simple types are not read from list.
* DATAES-853 - Cleanup tests that do not delete test indices.
* DATAES-852 - Upgrade to Elasticsearch 7.7.1.
* DATAES-850 - Add warning and documentation for missing TemporalAccessor configuration.
* DATAES-848 - Add the name of the index to SearchHit.
* DATAES-847 - Add missing DateFormat values.
* DATAES-845 - MappingElasticsearchConverter crashes when writing lists containing null values.
* DATAES-844 - Improve TOC formatting for migration guides.
* DATAES-841 - Remove deprecated type mappings code.
* DATAES-840 - Consolidate index name SpEL resolution.
* DATAES-839 - ReactiveElasticsearchTemplate should use RequestFactory.
* DATAES-838 - Update to Elasticsearch 7.7.0.
* DATAES-836 - Fix typo in Javadocs.
* DATAES-835 - Fix code sample in documentation for scroll API.
* DATAES-832 - findAllById repository method returns iterable with null elements for not found ids.
* DATAES-831 - SearchOperations.searchForStream does not use requested maxResults.
* DATAES-829 - Deprecate AbstractElasticsearchRepository and cleanup SimpleElasticsearchRepository.
* DATAES-828 - Fields of type date need to have a format defined.
* DATAES-827 - Repositories should not try to create an index when it already exists.
* DATAES-826 - Add method to IndexOperations to write an index mapping from a entity class.
* DATAES-825 - Update readme to use latest spring.io docs.
* DATAES-824 - Release 4.1 M1 (2020.0.0).
* DATAES-678 - Introduce ReactiveIndexOperations.
* DATAES-263 - Inner Hits support.
Changes in version 4.0.1.RELEASE (2020-06-10)
---------------------------------------------
* DATAES-857 - Registered simple types are not read from list.
* DATAES-850 - Add warning and documentation for missing TemporalAccessor configuration.
* DATAES-845 - MappingElasticsearchConverter crashes when writing lists containing null values.
* DATAES-844 - Improve TOC formatting for migration guides.
* DATAES-839 - ReactiveElasticsearchTemplate should use RequestFactory.
* DATAES-835 - Fix code sample in documentation for scroll API.
* DATAES-832 - findAllById repository method returns iterable with null elements for not found ids.
* DATAES-831 - SearchOperations.searchForStream does not use requested maxResults.
* DATAES-828 - Fields of type date need to have a format defined.
* DATAES-827 - Repositories should not try to create an index when it already exists.
* DATAES-823 - Release 4.0.1 (Neumann SR1).
Changes in version 3.2.8.RELEASE (2020-06-10)
---------------------------------------------
* DATAES-851 - Upgrade to Elasticsearch 6.8.10.
* DATAES-837 - Update to Elasticsearch 6.8.9.
* DATAES-821 - Fix code for adding an alias.
* DATAES-811 - Remove Travis CI.
* DATAES-807 - Release 3.2.8 (Moore SR8).
* DATAES-776 - Adapt RestClients class to change in InetSocketAddress class in JDK14.
* DATAES-767 - Fix ReactiveElasticsearch handling of 4xx HTTP responses.
Changes in version 3.1.18.RELEASE (2020-06-10)
----------------------------------------------
* DATAES-811 - Remove Travis CI.
* DATAES-806 - Release 3.1.18 (Lovelace SR18).
Changes in version 4.0.0.RELEASE (2020-05-12)
---------------------------------------------
* DATAES-822 - ElasticsearchRestTemplate should not use `spring-web`.
@ -1132,6 +1276,17 @@ Release Notes - Spring Data Elasticsearch - Version 1.0 M1 (2014-02-07)

View File

@ -1,4 +1,4 @@
Spring Data Elasticsearch 4.0 GA
Spring Data Elasticsearch 4.0.4 (Neumann SR4)
Copyright (c) [2013-2019] Pivotal Software, Inc.
This product is licensed to you under the Apache License, Version 2.0 (the "License").
@ -15,3 +15,7 @@ conditions of the subcomponent's license, as noted in the LICENSE file.

View File

@ -197,14 +197,22 @@ class ElasticsearchPartQueryTests {
String query = getQueryBuilder(methodName, parameterClasses, parameters);
String expected = "{\"query\": {" + //
" \"bool\" : {" + //
" \"must\" : [" + //
" {\"bool\" : {\"must\" : [{\"terms\" : {\"name\" : [\"" + names.get(0) + "\", \"" + names.get(1)
+ "\"]}}]}}" + //
" ]" + //
" }" + //
"}}"; //
String expected = "{\n" + //
" \"query\": {\n" + //
" \"bool\": {\n" + //
" \"must\": [\n" + //
" {\n" + //
" \"query_string\": {\n" + //
" \"query\": \"\\\"Title\\\" \\\"Title2\\\"\",\n" + //
" \"fields\": [\n" + //
" \"name^1.0\"\n" + //
" ]\n" + //
" }\n" + //
" }\n" + //
" ]\n" + //
" }\n" + //
" }\n" + //
"}\n"; //
assertEquals(expected, query, false);
}
@ -220,14 +228,22 @@ class ElasticsearchPartQueryTests {
String query = getQueryBuilder(methodName, parameterClasses, parameters);
String expected = "{\"query\": {" + //
" \"bool\" : {" + //
" \"must\" : [" + //
" {\"bool\" : {\"must_not\" : [{\"terms\" : {\"name\" : [\"" + names.get(0) + "\", \"" + names.get(1)
+ "\"]}}]}}" + //
" ]" + //
" }" + //
"}}"; //
String expected = "{\n" + //
" \"query\": {\n" + //
" \"bool\": {\n" + //
" \"must\": [\n" + //
" {\n" + //
" \"query_string\": {\n" + //
" \"query\": \"NOT(\\\"Title\\\" \\\"Title2\\\")\",\n" + //
" \"fields\": [\n" + //
" \"name^1.0\"\n" + //
" ]\n" + //
" }\n" + //
" }\n" + //
" ]\n" + //
" }\n" + //
" }\n" + //
"}\n"; //
assertEquals(expected, query, false);
}

View File

@ -77,7 +77,7 @@ import org.springframework.data.elasticsearch.annotations.ScriptedField;
import org.springframework.data.elasticsearch.core.geo.GeoPoint;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.*;
import org.springframework.data.util.CloseableIterator;
import org.springframework.data.util.StreamUtils;
import org.springframework.lang.Nullable;
/**
@ -1298,27 +1298,33 @@ public abstract class ElasticsearchTemplateTests {
assertThat(sampleEntities).hasSize(30);
}
@Test // DATAES-167
public void shouldReturnResultsWithStreamForGivenCriteriaQuery() {
@Test // DATAES-167, DATAES-831
public void shouldReturnAllResultsWithStreamForGivenCriteriaQuery() {
// given
List<IndexQuery> entities = createSampleEntitiesWithMessage("Test message", 30);
// when
operations.bulkIndex(entities, index);
operations.bulkIndex(createSampleEntitiesWithMessage("Test message", 30), index);
indexOperations.refresh();
// then
CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria());
criteriaQuery.setPageable(PageRequest.of(0, 10));
CloseableIterator<SearchHit<SampleEntity>> stream = operations.searchForStream(criteriaQuery, SampleEntity.class,
index);
List<SearchHit<SampleEntity>> sampleEntities = new ArrayList<>();
while (stream.hasNext()) {
sampleEntities.add(stream.next());
}
assertThat(sampleEntities).hasSize(30);
long count = StreamUtils
.createStreamFromIterator(operations.searchForStream(criteriaQuery, SampleEntity.class, index)).count();
assertThat(count).isEqualTo(30);
}
@Test // DATAES-831
void shouldLimitStreamResultToRequestedSize() {
// index 30 matching documents, but only request 10 via setMaxResults
operations.bulkIndex(createSampleEntitiesWithMessage("Test message", 30), index);
indexOperations.refresh();
CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria());
criteriaQuery.setMaxResults(10);
// the stream must stop after the requested maxResults, not drain all hits
long count = StreamUtils
.createStreamFromIterator(operations.searchForStream(criteriaQuery, SampleEntity.class, index)).count();
assertThat(count).isEqualTo(10);
}
private static List<IndexQuery> createSampleEntitiesWithMessage(String message, int numberOfEntities) {
@ -3128,8 +3134,8 @@ public abstract class ElasticsearchTemplateTests {
operations.refresh(OptimisticEntity.class);
List<Query> queries = singletonList(queryForOne(saved.getId()));
List<SearchHits<OptimisticEntity>> retrievedHits = operations.multiSearch(queries,
OptimisticEntity.class, operations.getIndexCoordinatesFor(OptimisticEntity.class));
List<SearchHits<OptimisticEntity>> retrievedHits = operations.multiSearch(queries, OptimisticEntity.class,
operations.getIndexCoordinatesFor(OptimisticEntity.class));
OptimisticEntity retrieved = retrievedHits.get(0).getSearchHit(0).getContent();
assertThatSeqNoPrimaryTermIsFilled(retrieved);
@ -3162,8 +3168,7 @@ public abstract class ElasticsearchTemplateTests {
operations.save(forEdit1);
forEdit2.setMessage("It'll be great");
assertThatThrownBy(() -> operations.save(forEdit2))
.isInstanceOf(OptimisticLockingFailureException.class);
assertThatThrownBy(() -> operations.save(forEdit2)).isInstanceOf(OptimisticLockingFailureException.class);
}
@Test // DATAES-799
@ -3179,8 +3184,7 @@ public abstract class ElasticsearchTemplateTests {
operations.save(forEdit1);
forEdit2.setMessage("It'll be great");
assertThatThrownBy(() -> operations.save(forEdit2))
.isInstanceOf(OptimisticLockingFailureException.class);
assertThatThrownBy(() -> operations.save(forEdit2)).isInstanceOf(OptimisticLockingFailureException.class);
}
@Test // DATAES-799

View File

@ -34,6 +34,7 @@ import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.dao.DataAccessException;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.DateFormat;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
@ -146,7 +147,7 @@ public class LogEntityTests {
@Field(type = Ip) private String ip;
@Field(type = Date) private java.util.Date date;
@Field(type = Date, format = DateFormat.date_time) private java.util.Date date;
private LogEntity() {}

View File

@ -25,6 +25,7 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import org.junit.jupiter.api.Test;
import org.springframework.data.util.StreamUtils;
/**
* @author Sascha Woo
@ -45,6 +46,7 @@ public class StreamQueriesTest {
// when
SearchHitsIterator<String> iterator = StreamQueries.streamResults( //
0, //
searchHits, //
scrollId -> newSearchScrollHits(Collections.emptyList(), scrollId), //
scrollIds -> clearScrollCalled.set(true));
@ -70,6 +72,7 @@ public class StreamQueriesTest {
// when
SearchHitsIterator<String> iterator = StreamQueries.streamResults( //
0, //
searchHits, //
scrollId -> newSearchScrollHits(Collections.emptyList(), scrollId), //
scrollId -> {});
@ -90,10 +93,12 @@ public class StreamQueriesTest {
Collections.singletonList(new SearchHit<String>(null, 0, null, null, "one")), "s-2");
SearchScrollHits<String> searchHits4 = newSearchScrollHits(Collections.emptyList(), "s-3");
Iterator<SearchScrollHits<String>> searchScrollHitsIterator = Arrays.asList(searchHits1, searchHits2, searchHits3,searchHits4).iterator();
Iterator<SearchScrollHits<String>> searchScrollHitsIterator = Arrays
.asList(searchHits1, searchHits2, searchHits3, searchHits4).iterator();
List<String> clearedScrollIds = new ArrayList<>();
SearchHitsIterator<String> iterator = StreamQueries.streamResults( //
0, //
searchScrollHitsIterator.next(), //
scrollId -> searchScrollHitsIterator.next(), //
scrollIds -> clearedScrollIds.addAll(scrollIds));
@ -106,6 +111,56 @@ public class StreamQueriesTest {
assertThat(clearedScrollIds).isEqualTo(Arrays.asList("s-1", "s-2", "s-3"));
}
@Test // DATAES-831
void shouldReturnAllForRequestedSizeOf0() {
// three scroll pages with one hit each, followed by an empty page that ends the scroll
SearchScrollHits<String> searchHits1 = newSearchScrollHits(
Collections.singletonList(new SearchHit<String>(null, 0, null, null, "one")), "s-1");
SearchScrollHits<String> searchHits2 = newSearchScrollHits(
Collections.singletonList(new SearchHit<String>(null, 0, null, null, "one")), "s-2");
SearchScrollHits<String> searchHits3 = newSearchScrollHits(
Collections.singletonList(new SearchHit<String>(null, 0, null, null, "one")), "s-2");
SearchScrollHits<String> searchHits4 = newSearchScrollHits(Collections.emptyList(), "s-3");
Iterator<SearchScrollHits<String>> searchScrollHitsIterator = Arrays
.asList(searchHits1, searchHits2, searchHits3, searchHits4).iterator();
// maxCount of 0 means "no limit": the iterator must deliver every available hit
SearchHitsIterator<String> iterator = StreamQueries.streamResults( //
0, //
searchScrollHitsIterator.next(), //
scrollId -> searchScrollHitsIterator.next(), //
scrollIds -> {});
long count = StreamUtils.createStreamFromIterator(iterator).count();
assertThat(count).isEqualTo(3);
}
@Test // DATAES-831
void shouldOnlyReturnRequestedCount() {
// three scroll pages with one hit each, followed by an empty terminating page
SearchScrollHits<String> searchHits1 = newSearchScrollHits(
Collections.singletonList(new SearchHit<String>(null, 0, null, null, "one")), "s-1");
SearchScrollHits<String> searchHits2 = newSearchScrollHits(
Collections.singletonList(new SearchHit<String>(null, 0, null, null, "one")), "s-2");
SearchScrollHits<String> searchHits3 = newSearchScrollHits(
Collections.singletonList(new SearchHit<String>(null, 0, null, null, "one")), "s-2");
SearchScrollHits<String> searchHits4 = newSearchScrollHits(Collections.emptyList(), "s-3");
Iterator<SearchScrollHits<String>> searchScrollHitsIterator = Arrays
.asList(searchHits1, searchHits2, searchHits3, searchHits4).iterator();
// a maxCount of 2 must cap the stream at 2 hits even though 3 are available
SearchHitsIterator<String> iterator = StreamQueries.streamResults( //
2, //
searchScrollHitsIterator.next(), //
scrollId -> searchScrollHitsIterator.next(), //
scrollIds -> {});
long count = StreamUtils.createStreamFromIterator(iterator).count();
assertThat(count).isEqualTo(2);
}
// builds a SearchScrollHits test fixture with the given hits and scroll id;
// total hits is derived from the list size, aggregations are left null
private SearchScrollHits<String> newSearchScrollHits(List<SearchHit<String>> hits, String scrollId) {
return new SearchHitsImpl<String>(hits.size(), TotalHitsRelation.EQUAL_TO, 0, scrollId, hits, null);
}

View File

@ -26,6 +26,7 @@ import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.RequiredArgsConstructor;
import lombok.Setter;
import java.time.LocalDate;
import java.util.ArrayList;
@ -39,6 +40,7 @@ import java.util.Map;
import org.json.JSONException;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.springframework.core.convert.ConversionService;
import org.springframework.core.convert.converter.Converter;
@ -616,7 +618,26 @@ public class MappingElasticsearchConverterUnitTests {
assertEquals(expected, json, false);
}
@Test
@Test // DATAES-924
@DisplayName("should write list of LocalDate")
void shouldWriteListOfLocalDate() throws JSONException {
LocalDatesEntity entity = new LocalDatesEntity();
entity.setId("4711");
entity.setDates(Arrays.asList(LocalDate.of(2020, 9, 15), LocalDate.of(2019, 5, 1)));
// each LocalDate in the collection must be converted with the entity's
// configured date format (dd.MM.uuuu pattern per the expected JSON below)
String expected = "{\n" + //
" \"id\": \"4711\",\n" + //
" \"dates\": [\"15.09.2020\", \"01.05.2019\"]\n" + //
"}\n"; //
Document document = Document.create();
mappingElasticsearchConverter.write(entity, document);
String json = document.toJson();
assertEquals(expected, json, false);
}
@Test // DATAES-716
void shouldReadLocalDate() {
Document document = Document.create();
document.put("id", "4711");
@ -632,6 +653,20 @@ public class MappingElasticsearchConverterUnitTests {
assertThat(person.getGender()).isEqualTo(Gender.MAN);
}
@Test // DATAES-924
@DisplayName("should read list of LocalDate")
void shouldReadListOfLocalDate() {

    // a document whose "dates" array uses the custom dd.MM.uuuu pattern of LocalDatesEntity
    Document source = Document.create();
    source.put("id", "4711");
    source.put("dates", new String[] { "15.09.2020", "01.05.2019" });

    LocalDatesEntity result = mappingElasticsearchConverter.read(LocalDatesEntity.class, source);

    assertThat(result.getId()).isEqualTo("4711");
    assertThat(result.getDates()).hasSize(2).containsExactly(LocalDate.of(2020, 9, 15), LocalDate.of(2019, 5, 1));
}
@Test // DATAES-763
void writeEntityWithMapDataType() {
@ -718,6 +753,101 @@ public class MappingElasticsearchConverterUnitTests {
assertThat(entity.seqNoPrimaryTerm).isNull();
}
@Test // DATAES-845
void shouldWriteCollectionsWithNullValues() throws JSONException {

    // null elements inside a list must be written as JSON null, not dropped
    EntityWithListProperty entity = new EntityWithListProperty();
    entity.setId("42");
    entity.setValues(Arrays.asList(null, "two", null, "four"));

    String expected = "{ \"id\": \"42\", \"values\": [null, \"two\", null, \"four\"]}";

    Document sink = Document.create();
    mappingElasticsearchConverter.write(entity, sink);

    // lenient comparison via JSONAssert
    assertEquals(expected, sink.toJson(), false);
}
@Test // DATAES-857
void shouldWriteEntityWithListOfGeoPoints() throws JSONException {

    // each GeoPoint in the collection must be written as a {lat, lon} object
    GeoPointListEntity geoEntity = new GeoPointListEntity();
    geoEntity.setId("42");
    geoEntity.setLocations(Arrays.asList(new GeoPoint(12.34, 23.45), new GeoPoint(34.56, 45.67)));

    String expected = "{\n" + //
            " \"id\": \"42\",\n" + //
            " \"locations\": [\n" + //
            " {\n" + //
            " \"lat\": 12.34,\n" + //
            " \"lon\": 23.45\n" + //
            " },\n" + //
            " {\n" + //
            " \"lat\": 34.56,\n" + //
            " \"lon\": 45.67\n" + //
            " }\n" + //
            " ]\n" + //
            "}"; //

    Document sink = Document.create();
    mappingElasticsearchConverter.write(geoEntity, sink);

    assertEquals(expected, sink.toJson(), false);
}
@Test // DATAES-857
void shouldReadEntityWithListOfGeoPoints() {

    // a document with an array of {lat, lon} objects must map back to List<GeoPoint>
    String json = "{\n" + //
            " \"id\": \"42\",\n" + //
            " \"locations\": [\n" + //
            " {\n" + //
            " \"lat\": 12.34,\n" + //
            " \"lon\": 23.45\n" + //
            " },\n" + //
            " {\n" + //
            " \"lat\": 34.56,\n" + //
            " \"lon\": 45.67\n" + //
            " }\n" + //
            " ]\n" + //
            "}"; //

    GeoPointListEntity result = mappingElasticsearchConverter.read(GeoPointListEntity.class, Document.parse(json));

    assertThat(result.id).isEqualTo("42");
    assertThat(result.locations).containsExactly(new GeoPoint(12.34, 23.45), new GeoPoint(34.56, 45.67));
}
@Test // DATAES-865
void shouldWriteEntityWithMapAsObject() throws JSONException {

    // a Map held in an Object-typed property is written as a nested JSON object
    Map<String, Object> content = new LinkedHashMap<>();
    content.put("foo", "bar");

    EntityWithObject entity = new EntityWithObject();
    entity.setId("42");
    entity.setContent(content);

    String expected = "{\n" + //
            " \"id\": \"42\",\n" + //
            " \"content\": {\n" + //
            " \"foo\": \"bar\"\n" + //
            " }\n" + //
            "}\n"; //

    Document sink = Document.create();
    mappingElasticsearchConverter.write(entity, sink);

    assertEquals(expected, sink.toJson(), false);
}
// Renders a JSON fragment "name":{"lat":x,"lon":y} for the given point.
// Locale.ENGLISH pins the decimal separator to '.' regardless of the default locale.
// NOTE(review): getX() is emitted as lat and getY() as lon — confirm this matches the
// converter's Point handling.
private String pointTemplate(String name, Point point) {
return String.format(Locale.ENGLISH, "\"%s\":{\"lat\":%.1f,\"lon\":%.1f}", name, point.getX(), point.getY());
}
@ -755,6 +885,15 @@ public class MappingElasticsearchConverterUnitTests {
Map<String, Inventory> inventoryMap;
}
// Test entity for DATAES-924: a List<LocalDate> property with a custom date pattern.
// The redundant @Data was removed — @Getter/@Setter already generate the accessors the
// tests use, and stacking @Data on top of them only duplicates generated code.
@Getter
@Setter
static class LocalDatesEntity {
    @Id private String id;
    // written/read as "dd.MM.uuuu" strings, e.g. "15.09.2020"
    @Field(name = "dates", type = FieldType.Date, format = DateFormat.custom,
            pattern = "dd.MM.uuuu") private List<LocalDate> dates;
}
enum Gender {
MAN("1"), MACHINE("0");
@ -932,4 +1071,23 @@ public class MappingElasticsearchConverterUnitTests {
@Nullable private SeqNoPrimaryTerm seqNoPrimaryTerm;
}
// Sample entity for DATAES-845: a String list that may contain null elements.
@Data
static class EntityWithListProperty {
@Id private String id;
private List<String> values;
}
// Sample entity for DATAES-857: a collection of GeoPoint values.
@Data
static class GeoPointListEntity {
@Id String id;
List<GeoPoint> locations;
}
// Sample entity for DATAES-865: an Object-typed property that holds a Map at runtime.
@Data
static class EntityWithObject {
@Id private String id;
private Object content;
}
}

View File

@ -42,6 +42,7 @@ import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import org.assertj.core.data.Percentage;
@ -263,6 +264,7 @@ public class MappingBuilderTests extends MappingContextBaseTests {
}
@Test // DATAES-420
@SuppressWarnings({ "rawtypes", "unchecked" })
public void shouldUseBothAnalyzer() {
// given
@ -285,6 +287,7 @@ public class MappingBuilderTests extends MappingContextBaseTests {
}
@Test // DATAES-492
@SuppressWarnings("rawtypes")
public void shouldUseKeywordNormalizer() {
// given
@ -305,6 +308,7 @@ public class MappingBuilderTests extends MappingContextBaseTests {
}
@Test // DATAES-503
@SuppressWarnings("rawtypes")
public void shouldUseCopyTo() {
// given
@ -408,12 +412,29 @@ public class MappingBuilderTests extends MappingContextBaseTests {
assertEquals(expected, mapping, false);
}
@Test // DATAES-568
@Test // DATAES-568, DATAES-896
public void shouldUseFieldNameOnMultiField() throws JSONException {
// given
String expected = "{\"properties\":{" + "\"id-property\":{\"type\":\"keyword\",\"index\":true},"
+ "\"multifield-property\":{\"type\":\"text\",\"analyzer\":\"whitespace\",\"fields\":{\"prefix\":{\"type\":\"text\",\"analyzer\":\"stop\",\"search_analyzer\":\"standard\"}}}}}";
String expected = "{\n" + //
" \"properties\": {\n" + //
" \"id-property\": {\n" + //
" \"type\": \"keyword\",\n" + //
" \"index\": true\n" + //
" },\n" + //
" \"main-field\": {\n" + //
" \"type\": \"text\",\n" + //
" \"analyzer\": \"whitespace\",\n" + //
" \"fields\": {\n" + //
" \"suff-ix\": {\n" + //
" \"type\": \"text\",\n" + //
" \"analyzer\": \"stop\",\n" + //
" \"search_analyzer\": \"standard\"\n" + //
" }\n" + //
" }\n" + //
" }\n" + //
" }\n" + //
"}\n"; //
// when
String mapping = getMappingBuilder().buildPropertyMapping(FieldNameEntity.MultiFieldEntity.class);
@ -659,9 +680,10 @@ public class MappingBuilderTests extends MappingContextBaseTests {
@Nullable @Id @Field("id-property") private String id;
@Nullable @Field("multifield-property") //
@MultiField(mainField = @Field(type = FieldType.Text, analyzer = "whitespace"), otherFields = {
@InnerField(suffix = "prefix", type = FieldType.Text, analyzer = "stop", searchAnalyzer = "standard") }) //
@Nullable //
@MultiField(mainField = @Field(name = "main-field", type = FieldType.Text, analyzer = "whitespace"),
otherFields = {
@InnerField(suffix = "suff-ix", type = FieldType.Text, analyzer = "stop", searchAnalyzer = "standard") }) //
private String description;
}
}
@ -705,6 +727,7 @@ public class MappingBuilderTests extends MappingContextBaseTests {
* @author Stuart Stevenson
* @author Mohsin Husen
*/
@Data
@Document(indexName = "test-index-simple-recursive-mapping-builder", replicas = 0, refreshInterval = "-1")
static class SimpleRecursiveEntity {
@ -804,7 +827,7 @@ public class MappingBuilderTests extends MappingContextBaseTests {
*/
static class SampleInheritedEntityBuilder {
private SampleInheritedEntity result;
private final SampleInheritedEntity result;
public SampleInheritedEntityBuilder(String id) {
result = new SampleInheritedEntity();
@ -827,7 +850,7 @@ public class MappingBuilderTests extends MappingContextBaseTests {
public IndexQuery buildIndex() {
IndexQuery indexQuery = new IndexQuery();
indexQuery.setId(result.getId());
indexQuery.setId(Objects.requireNonNull(result.getId()));
indexQuery.setObject(result);
return indexQuery;
}
@ -859,7 +882,7 @@ public class MappingBuilderTests extends MappingContextBaseTests {
@Nullable @Id private String id;
@Nullable @Field(type = FieldType.Date, index = false) private Date createdDate;
@Nullable @Field(type = FieldType.Date, format = DateFormat.date_time, index = false) private Date createdDate;
@Nullable
public String getId() {
@ -1051,7 +1074,7 @@ public class MappingBuilderTests extends MappingContextBaseTests {
@Document(indexName = "valueDoc")
static class ValueDoc {
@Field(type = Text) private ValueObject valueObject;
@Nullable @Field(type = Text) private ValueObject valueObject;
}
@Getter

View File

@ -8,6 +8,7 @@ import org.junit.jupiter.api.Test;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;
import org.springframework.data.elasticsearch.annotations.InnerField;
import org.springframework.data.elasticsearch.annotations.MultiField;
import org.springframework.data.elasticsearch.annotations.Score;
import org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersistentEntity;
import org.springframework.lang.Nullable;
@ -31,9 +32,10 @@ public class MappingParametersTest extends MappingContextBaseTests {
@Test // DATAES-621
public void shouldCreateParametersForInnerFieldAnnotation() {
Annotation annotation = entity.getRequiredPersistentProperty("innerField").findAnnotation(InnerField.class);
MappingParameters mappingParameters = MappingParameters.from(annotation);
MultiField multiField = entity.getRequiredPersistentProperty("mainField").findAnnotation(MultiField.class);
InnerField innerField = multiField.otherFields()[0];
MappingParameters mappingParameters = MappingParameters.from(innerField);
assertThat(mappingParameters).isNotNull();
}
@ -61,7 +63,8 @@ public class MappingParametersTest extends MappingContextBaseTests {
static class AnnotatedClass {
@Nullable @Field private String field;
@Nullable @InnerField(suffix = "test", type = FieldType.Text) private String innerField;
@Nullable @MultiField(mainField = @Field,
otherFields = { @InnerField(suffix = "test", type = FieldType.Text) }) private String mainField;
@Score private float score;
@Nullable @Field(type = FieldType.Text, docValues = false) private String docValuesText;
@Nullable @Field(type = FieldType.Nested, docValues = false) private String docValuesNested;

View File

@ -41,10 +41,10 @@ public class SimpleElasticsearchDateMappingTests extends MappingContextBaseTests
// Expected index mapping for SampleDateMappingEntity. The duplicated
// "\"basicFormatDate\":{\"" fragment (merge/diff residue) was removed — as written the
// concatenation produced malformed JSON that could never match a real mapping.
private static final String EXPECTED_MAPPING = "{\"properties\":{\"message\":{\"store\":true,"
        + "\"type\":\"text\",\"index\":false,\"analyzer\":\"standard\"},\"customFormatDate\":{\"type\":\"date\",\"format\":\"dd.MM.uuuu hh:mm\"},"
        + "\"defaultFormatDate\":{\"type\":\"date\"},"
        + "\"basicFormatDate\":{\"type\":\"date\",\"format\":\"basic_date\"}}}";
@Test // DATAES-568
@Test // DATAES-568, DATAES-828
public void testCorrectDateMappings() {
String mapping = getMappingBuilder().buildPropertyMapping(SampleDateMappingEntity.class);
@ -67,8 +67,6 @@ public class SimpleElasticsearchDateMappingTests extends MappingContextBaseTests
@Field(type = Date, format = DateFormat.custom,
pattern = "dd.MM.uuuu hh:mm") private LocalDateTime customFormatDate;
@Field(type = FieldType.Date) private LocalDateTime defaultFormatDate;
@Field(type = FieldType.Date, format = DateFormat.basic_date) private LocalDateTime basicFormatDate;
}
}

View File

@ -17,17 +17,23 @@ package org.springframework.data.elasticsearch.core.mapping;
import static org.assertj.core.api.Assertions.*;
import lombok.Data;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.springframework.data.elasticsearch.annotations.DateFormat;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;
import org.springframework.data.elasticsearch.annotations.InnerField;
import org.springframework.data.elasticsearch.annotations.MultiField;
import org.springframework.data.elasticsearch.annotations.Score;
import org.springframework.data.elasticsearch.core.query.SeqNoPrimaryTerm;
import org.springframework.data.mapping.MappingException;
@ -74,7 +80,17 @@ public class SimpleElasticsearchPersistentPropertyUnitTests {
assertThat(persistentProperty.getFieldName()).isEqualTo("by-value");
}
@Test // DATAES-716, DATAES-792
@Test // DATAES-896
void shouldUseNameFromMultiFieldMainField() {
    // the name declared on the @MultiField main @Field must win over the property name
    ElasticsearchPersistentProperty property = context //
            .getRequiredPersistentEntity(MultiFieldProperty.class) //
            .getPersistentProperty("mainfieldProperty");

    assertThat(property).isNotNull();
    assertThat(property.getFieldName()).isEqualTo("mainfield");
}
@Test // DATAES-716, DATAES-792, DATAES-924
void shouldSetPropertyConverters() {
SimpleElasticsearchPersistentEntity<?> persistentEntity = context.getRequiredPersistentEntity(DatesProperty.class);
@ -90,6 +106,9 @@ public class SimpleElasticsearchPersistentPropertyUnitTests {
assertThat(persistentProperty.hasPropertyConverter()).isTrue();
assertThat(persistentProperty.getPropertyConverter()).isNotNull();
persistentProperty = persistentEntity.getRequiredPersistentProperty("localDateList");
assertThat(persistentProperty.hasPropertyConverter()).isTrue();
assertThat(persistentProperty.getPropertyConverter()).isNotNull();
}
@Test // DATAES-716
@ -173,6 +192,28 @@ public class SimpleElasticsearchPersistentPropertyUnitTests {
assertThat(seqNoProperty.isReadable()).isFalse();
}
@Test // DATAES-828
void shouldRequireFormatForDateField() {
    // a FieldType.Date property without an explicit format must be rejected at mapping time
    assertThatThrownBy(() -> context.getRequiredPersistentEntity(DateFieldWithNoFormat.class)) //
            .isInstanceOf(MappingException.class) //
            .hasMessageContaining("date");
}
@Test // DATAES-828
void shouldRequireFormatForDateNanosField() {
    // FieldType.Date_Nanos needs an explicit format just like FieldType.Date
    assertThatThrownBy(() -> context.getRequiredPersistentEntity(DateNanosFieldWithNoFormat.class)) //
            .isInstanceOf(MappingException.class) //
            .hasMessageContaining("date");
}
@Test // DATAES-924
@DisplayName("should require pattern for custom date format")
void shouldRequirePatternForCustomDateFormat() {
    // DateFormat.custom with an empty pattern cannot be converted — must fail fast
    assertThatThrownBy(() -> context.getRequiredPersistentEntity(DateFieldWithCustomFormatAndNoPattern.class)) //
            .isInstanceOf(MappingException.class) //
            .hasMessageContaining("pattern");
}
// @Score applied to a String property — presumably used by a test asserting that the
// mapping context rejects it; confirm with the referencing test (not visible here).
static class InvalidScoreProperty {
@Nullable @Score String scoreProperty;
}
@ -185,14 +226,37 @@ public class SimpleElasticsearchPersistentPropertyUnitTests {
@Nullable @Field(value = "by-value") String fieldProperty;
}
// DATAES-896 fixture: the main @Field carries the name "mainfield", which
// shouldUseNameFromMultiFieldMainField expects as the resolved field name.
static class MultiFieldProperty {
@Nullable @MultiField(mainField = @Field("mainfield"),
otherFields = { @InnerField(suffix = "suff", type = FieldType.Keyword) }) String mainfieldProperty;
}
// Fixture with date properties in several flavors (LocalDate, LocalDateTime, legacy
// java.util.Date, and — DATAES-924 — a List<LocalDate> with a custom pattern);
// shouldSetPropertyConverters asserts each one gets a property converter.
static class DatesProperty {
@Nullable @Field(type = FieldType.Date, format = DateFormat.custom, pattern = "dd.MM.uuuu") LocalDate localDate;
@Nullable @Field(type = FieldType.Date, format = DateFormat.basic_date_time) LocalDateTime localDateTime;
@Nullable @Field(type = FieldType.Date, format = DateFormat.basic_date_time) Date legacyDate;
@Nullable @Field(type = FieldType.Date, format = DateFormat.custom,
pattern = "dd.MM.uuuu") List<LocalDate> localDateList;
}
// Fixture pairing a SeqNoPrimaryTerm property with a regular String property;
// presumably used to assert seq_no/primary_term handling (the visible test fragment
// checks isReadable() is false for the SeqNoPrimaryTerm property).
@Data
static class SeqNoPrimaryTermProperty {
SeqNoPrimaryTerm seqNoPrimaryTerm;
String string;
}
// DATAES-828 fixture: a Date field without a format — expected to raise a MappingException.
@Data
static class DateFieldWithNoFormat {
@Field(type = FieldType.Date) LocalDateTime datetime;
}
// DATAES-924 fixture: DateFormat.custom with an empty pattern — expected to raise a
// MappingException complaining about the missing pattern.
@Data
static class DateFieldWithCustomFormatAndNoPattern {
@Field(type = FieldType.Date, format = DateFormat.custom, pattern = "") LocalDateTime datetime;
}
// DATAES-828 fixture: a Date_Nanos field without a format — expected to raise a MappingException.
@Data
static class DateNanosFieldWithNoFormat {
@Field(type = FieldType.Date_Nanos) LocalDateTime datetime;
}
}

View File

@ -15,9 +15,9 @@
*/
package org.springframework.data.elasticsearch.repositories.custommethod;
import static org.apache.commons.lang.RandomStringUtils.*;
import static org.assertj.core.api.Assertions.*;
import static org.springframework.data.elasticsearch.annotations.FieldType.*;
import static org.springframework.data.elasticsearch.utils.IdGenerator.*;
import lombok.AllArgsConstructor;
import lombok.Builder;
@ -100,7 +100,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethod() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -119,7 +119,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodForNot() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("some");
@ -137,7 +137,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodWithQuery() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -157,7 +157,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodWithLessThan() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -165,7 +165,7 @@ public abstract class CustomMethodRepositoryBaseTests {
sampleEntity.setMessage("some message");
repository.save(sampleEntity);
String documentId2 = randomNumeric(5);
String documentId2 = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
sampleEntity2.setType("test");
@ -185,7 +185,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodWithBefore() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -205,7 +205,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodWithAfter() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -225,7 +225,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodWithLike() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -245,7 +245,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodForStartingWith() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -265,7 +265,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodForEndingWith() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -285,7 +285,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodForContains() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -305,7 +305,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodForIn() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -313,7 +313,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
// given
String documentId2 = randomNumeric(5);
String documentId2 = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
sampleEntity2.setType("test");
@ -334,7 +334,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodForNotIn() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -342,7 +342,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
// given
String documentId2 = randomNumeric(5);
String documentId2 = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
sampleEntity2.setType("test");
@ -360,16 +360,16 @@ public abstract class CustomMethodRepositoryBaseTests {
}
@Test // DATAES-647
public void shouldHandleManyValuesQueryingIn() {
public void shouldHandleManyKeywordValuesQueryingIn() {
// given
String documentId1 = randomNumeric(32);
String documentId1 = nextIdAsString();
SampleEntity sampleEntity1 = new SampleEntity();
sampleEntity1.setId(documentId1);
sampleEntity1.setKeyword("foo");
repository.save(sampleEntity1);
String documentId2 = randomNumeric(32);
String documentId2 = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
sampleEntity2.setKeyword("bar");
@ -378,8 +378,9 @@ public abstract class CustomMethodRepositoryBaseTests {
List<String> keywords = new ArrayList<>();
keywords.add("foo");
for (int i = 0; i < 1025; i++) {
keywords.add(randomNumeric(32));
// limit for normal query clauses is 1024, for keywords we change to terms queries
for (int i = 0; i < 1200; i++) {
keywords.add(nextIdAsString());
}
// when
@ -391,16 +392,16 @@ public abstract class CustomMethodRepositoryBaseTests {
}
@Test // DATAES-647
public void shouldHandleManyValuesQueryingNotIn() {
public void shouldHandleManyKeywordValuesQueryingNotIn() {
// given
String documentId1 = randomNumeric(32);
String documentId1 = nextIdAsString();
SampleEntity sampleEntity1 = new SampleEntity();
sampleEntity1.setId(documentId1);
sampleEntity1.setKeyword("foo");
repository.save(sampleEntity1);
String documentId2 = randomNumeric(32);
String documentId2 = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
sampleEntity2.setKeyword("bar");
@ -409,8 +410,9 @@ public abstract class CustomMethodRepositoryBaseTests {
List<String> keywords = new ArrayList<>();
keywords.add("foo");
for (int i = 0; i < 1025; i++) {
keywords.add(randomNumeric(32));
// limit for normal query clauses is 1024, for keywords we change to terms queries
for (int i = 0; i < 1200; i++) {
keywords.add(nextIdAsString());
}
// when
@ -421,11 +423,51 @@ public abstract class CustomMethodRepositoryBaseTests {
assertThat(list.get(0).getId()).isEqualTo(documentId2);
}
@Test // DATAES-912
void shouldHandleTextFieldQueryingIn() {

    // two documents whose analyzed text field holds "foo" and "bar"
    SampleEntity first = new SampleEntity();
    first.setId(nextIdAsString());
    first.setMessage("foo");
    repository.save(first);

    SampleEntity second = new SampleEntity();
    second.setId(nextIdAsString());
    second.setMessage("bar");
    repository.save(second);

    // "In" on a text field matches through the analyzer, so the casing differs on purpose
    List<SampleEntity> found = repository.findByMessageIn(Arrays.asList("Foo", "Bar"));

    assertThat(found).hasSize(2);
    assertThat(found.stream().map(SampleEntity::getId)).containsExactlyInAnyOrder(first.getId(), second.getId());
}
@Test // DATAES-912
void shouldHandleTextFieldQueryingNotIn() {

    // "foo" must survive the NotIn filter; "bar" is excluded by the analyzed "Bar" value
    SampleEntity kept = new SampleEntity();
    kept.setId(nextIdAsString());
    kept.setMessage("foo");
    repository.save(kept);

    SampleEntity excluded = new SampleEntity();
    excluded.setId(nextIdAsString());
    excluded.setMessage("bar");
    repository.save(excluded);

    List<SampleEntity> found = repository.findByMessageNotIn(Arrays.asList("Boo", "Bar"));

    assertThat(found).hasSize(1);
    assertThat(found.get(0).getId()).isEqualTo(kept.getId());
}
@Test
public void shouldExecuteCustomMethodForTrue() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -434,7 +476,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
// given
String documentId2 = randomNumeric(5);
String documentId2 = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
sampleEntity2.setType("test");
@ -453,7 +495,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodForFalse() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -462,7 +504,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
// given
String documentId2 = randomNumeric(5);
String documentId2 = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
sampleEntity2.setType("test");
@ -482,7 +524,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodForOrderBy() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("abc");
@ -491,7 +533,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
// document 2
String documentId2 = randomNumeric(5);
String documentId2 = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
sampleEntity2.setType("xyz");
@ -500,7 +542,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity2);
// document 3
String documentId3 = randomNumeric(5);
String documentId3 = nextIdAsString();
SampleEntity sampleEntity3 = new SampleEntity();
sampleEntity3.setId(documentId3);
sampleEntity3.setType("def");
@ -520,7 +562,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodWithBooleanParameter() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -529,7 +571,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
// given
String documentId2 = randomNumeric(5);
String documentId2 = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
sampleEntity2.setType("test");
@ -549,7 +591,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldReturnPageableInUnwrappedPageResult() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -558,7 +600,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
// given
String documentId2 = randomNumeric(5);
String documentId2 = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
sampleEntity2.setType("test");
@ -601,21 +643,21 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldReturnPageableResultsWithGivenSortingOrder() {
// given
String documentId = random(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setMessage("abc");
sampleEntity.setVersion(System.currentTimeMillis());
repository.save(sampleEntity);
String documentId2 = randomNumeric(5);
String documentId2 = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
sampleEntity2.setMessage("abd");
sampleEntity.setVersion(System.currentTimeMillis());
repository.save(sampleEntity2);
String documentId3 = randomNumeric(5);
String documentId3 = nextIdAsString();
SampleEntity sampleEntity3 = new SampleEntity();
sampleEntity3.setId(documentId3);
sampleEntity3.setMessage("abe");
@ -635,21 +677,21 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldReturnListForMessage() {
// given
String documentId = random(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setMessage("abc");
sampleEntity.setVersion(System.currentTimeMillis());
repository.save(sampleEntity);
String documentId2 = randomNumeric(5);
String documentId2 = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
sampleEntity2.setMessage("abd");
sampleEntity.setVersion(System.currentTimeMillis());
repository.save(sampleEntity2);
String documentId3 = randomNumeric(5);
String documentId3 = nextIdAsString();
SampleEntity sampleEntity3 = new SampleEntity();
sampleEntity3.setId(documentId3);
sampleEntity3.setMessage("abe");
@ -667,7 +709,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodWithGeoPoint() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -689,7 +731,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodWithGeoPointAndString() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -699,7 +741,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
documentId = randomNumeric(5);
documentId = nextIdAsString();
sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -722,7 +764,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodWithWithinGeoPoint() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -745,7 +787,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodWithWithinPoint() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -768,7 +810,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodWithNearBox() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -778,7 +820,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
documentId = randomNumeric(5);
documentId = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId);
sampleEntity2.setType("test2");
@ -808,7 +850,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldExecuteCustomMethodWithNearPointAndDistance() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -846,7 +888,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldCountCustomMethod() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -854,7 +896,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
documentId = randomNumeric(5);
documentId = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId);
sampleEntity2.setType("test2");
@ -873,7 +915,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldCountCustomMethodForNot() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("some");
@ -881,7 +923,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
documentId = randomNumeric(5);
documentId = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId);
sampleEntity2.setType("test");
@ -900,7 +942,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldCountCustomMethodWithBooleanParameter() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -909,7 +951,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
// given
String documentId2 = randomNumeric(5);
String documentId2 = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
sampleEntity2.setType("test");
@ -928,7 +970,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldCountCustomMethodWithLessThan() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -936,7 +978,7 @@ public abstract class CustomMethodRepositoryBaseTests {
sampleEntity.setMessage("some message");
repository.save(sampleEntity);
String documentId2 = randomNumeric(5);
String documentId2 = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
sampleEntity2.setType("test");
@ -955,7 +997,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldCountCustomMethodWithBefore() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -964,7 +1006,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
documentId = randomNumeric(5);
documentId = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId);
sampleEntity2.setType("test");
@ -984,7 +1026,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldCountCustomMethodWithAfter() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -993,7 +1035,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
documentId = randomNumeric(5);
documentId = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId);
sampleEntity2.setType("test");
@ -1013,7 +1055,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldCountCustomMethodWithLike() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -1022,7 +1064,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
documentId = randomNumeric(5);
documentId = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId);
sampleEntity2.setType("test");
@ -1042,7 +1084,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldCountCustomMethodForStartingWith() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -1051,7 +1093,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
documentId = randomNumeric(5);
documentId = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId);
sampleEntity2.setType("test");
@ -1071,7 +1113,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldCountCustomMethodForEndingWith() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -1080,7 +1122,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
documentId = randomNumeric(5);
documentId = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId);
sampleEntity2.setType("test");
@ -1100,7 +1142,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldCountCustomMethodForContains() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -1109,7 +1151,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
documentId = randomNumeric(5);
documentId = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId);
sampleEntity2.setType("test");
@ -1129,7 +1171,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldCountCustomMethodForIn() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -1137,7 +1179,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
// given
String documentId2 = randomNumeric(5);
String documentId2 = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
sampleEntity2.setType("test");
@ -1157,7 +1199,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldCountCustomMethodForNotIn() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -1165,7 +1207,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
// given
String documentId2 = randomNumeric(5);
String documentId2 = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
sampleEntity2.setType("test");
@ -1185,7 +1227,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldCountCustomMethodForTrue() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -1194,7 +1236,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
// given
String documentId2 = randomNumeric(5);
String documentId2 = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
sampleEntity2.setType("test");
@ -1212,7 +1254,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldCountCustomMethodForFalse() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -1221,7 +1263,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
// given
String documentId2 = randomNumeric(5);
String documentId2 = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
sampleEntity2.setType("test");
@ -1240,7 +1282,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldCountCustomMethodWithWithinGeoPoint() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -1250,7 +1292,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
documentId = randomNumeric(5);
documentId = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId);
sampleEntity2.setType("test");
@ -1271,7 +1313,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldCountCustomMethodWithWithinPoint() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -1281,7 +1323,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
documentId = randomNumeric(5);
documentId = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId);
sampleEntity2.setType("test");
@ -1302,7 +1344,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldCountCustomMethodWithNearBox() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -1312,7 +1354,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
documentId = randomNumeric(5);
documentId = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId);
sampleEntity2.setType("test2");
@ -1333,7 +1375,7 @@ public abstract class CustomMethodRepositoryBaseTests {
public void shouldCountCustomMethodWithNearPointAndDistance() {
// given
String documentId = randomNumeric(5);
String documentId = nextIdAsString();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setType("test");
@ -1343,7 +1385,7 @@ public abstract class CustomMethodRepositoryBaseTests {
repository.save(sampleEntity);
documentId = randomNumeric(5);
documentId = nextIdAsString();
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId);
sampleEntity2.setType("test");
@ -1544,6 +1586,28 @@ public abstract class CustomMethodRepositoryBaseTests {
return entities;
}
// DATAES-891: a @Query-annotated repository method returning Stream<SampleEntity> must not crash.
@Test // DATAES-891
void shouldStreamEntitiesWithQueryAnnotatedMethod() {
// given: 20 entities of type "abc" saved through the repository
List<SampleEntity> entities = createSampleEntities("abc", 20);
repository.saveAll(entities);
// when: the annotated query method is called
Stream<SampleEntity> stream = streamingRepository.streamEntitiesByType("abc");
// then: the stream yields exactly the 20 saved entities, each as a SampleEntity
long count = stream.peek(sampleEntity -> assertThat(sampleEntity).isInstanceOf(SampleEntity.class)).count();
assertThat(count).isEqualTo(20);
}
// DATAES-891: a @Query-annotated repository method may also return Stream<SearchHit<T>>.
@Test // DATAES-891
void shouldStreamSearchHitsWithQueryAnnotatedMethod() {
// given: 20 entities of type "abc" saved through the repository
List<SampleEntity> entities = createSampleEntities("abc", 20);
repository.saveAll(entities);
// when: the annotated query method returning search hits is called
Stream<SearchHit<SampleEntity>> stream = streamingRepository.streamSearchHitsByType("abc");
// then: the stream yields 20 elements, each wrapped in a SearchHit
long count = stream.peek(sampleEntity -> assertThat(sampleEntity).isInstanceOf(SearchHit.class)).count();
assertThat(count).isEqualTo(20);
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@ -1600,6 +1664,10 @@ public abstract class CustomMethodRepositoryBaseTests {
List<SampleEntity> findByKeywordNotIn(List<String> keywords);
List<SampleEntity> findByMessageIn(List<String> keywords);
List<SampleEntity> findByMessageNotIn(List<String> keywords);
Page<SampleEntity> findByIdNotIn(List<String> ids, Pageable pageable);
Page<SampleEntity> findByAvailableTrue(Pageable pageable);
@ -1687,5 +1755,12 @@ public abstract class CustomMethodRepositoryBaseTests {
Stream<SampleEntity> findByType(String type);
Stream<SampleEntity> findByType(String type, Pageable pageable);
@Query("{\"bool\": {\"must\": [{\"term\": {\"type\": \"?0\"}}]}}")
Stream<SampleEntity> streamEntitiesByType(String type);
@Query("{\"bool\": {\"must\": [{\"term\": {\"type\": \"?0\"}}]}}")
Stream<SearchHit<SampleEntity>> streamSearchHitsByType(String type);
}
}

View File

@ -25,12 +25,14 @@ import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.IOException;
import java.lang.Long;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
@ -56,6 +58,7 @@ import org.springframework.data.elasticsearch.junit.jupiter.SpringIntegrationTes
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
import org.springframework.data.elasticsearch.repository.config.EnableElasticsearchRepositories;
import org.springframework.data.elasticsearch.utils.IndexInitializer;
import org.springframework.data.util.StreamUtils;
import org.springframework.test.context.ContextConfiguration;
/**
@ -361,6 +364,14 @@ public class SimpleElasticsearchRepositoryTests {
@Test
public void shouldDeleteAll() {
// given
String documentId = randomNumeric(5);
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setMessage("hello world.");
sampleEntity.setVersion(System.currentTimeMillis());
repository.save(sampleEntity);
// when
repository.deleteAll();
@ -677,6 +688,32 @@ public class SimpleElasticsearchRepositoryTests {
assertThat(savedEntities).hasSize(0);
}
// DATAES-832: findAllById must silently skip ids that have no document instead of
// returning null elements for them.
@Test // DATAES-832
void shouldNotReturnNullValuesInFindAllById() {
// given: three entities with known ids stored in the index
String documentId1 = "id-one";
SampleEntity sampleEntity1 = new SampleEntity();
sampleEntity1.setId(documentId1);
repository.save(sampleEntity1);
String documentId2 = "id-two";
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
repository.save(sampleEntity2);
String documentId3 = "id-three";
SampleEntity sampleEntity3 = new SampleEntity();
sampleEntity3.setId(documentId3);
repository.save(sampleEntity3);
// when: findAllById is called with a mix of existing and non-existing ids
Iterable<SampleEntity> allById = repository
.findAllById(Arrays.asList("id-one", "does-not-exist", "id-two", "where-am-i", "id-three"));
List<SampleEntity> results = StreamUtils.createStreamFromIterator(allById.iterator()).collect(Collectors.toList());
// then: only the three existing entities are returned — no nulls for the missing ids
assertThat(results).hasSize(3);
assertThat(results.stream().map(SampleEntity::getId).collect(Collectors.toList()))
.containsExactlyInAnyOrder("id-one", "id-two", "id-three");
}
private static List<SampleEntity> createSampleEntitiesWithMessage(String message, int numberOfEntities) {
List<SampleEntity> sampleEntities = new ArrayList<>();

View File

@ -0,0 +1,41 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.elasticsearch.utils;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Class to provide sequential IDs for tests. Backed by a single shared {@link AtomicInteger},
 * so ids are unique across threads; 2^31 - 1 values should be enough for the test runs.
 *
 * @author Peter-Josef Meisch
 */
public final class IdGenerator {

	/** shared counter; incrementAndGet makes the first returned id 1 */
	private static final AtomicInteger NEXT = new AtomicInteger();

	// utility class, not to be instantiated
	private IdGenerator() {}

	/**
	 * @return the next sequential id as an {@code int}, starting at 1
	 */
	public static int nextIdAsInt() {
		return NEXT.incrementAndGet();
	}

	/**
	 * @return the next sequential id, widened to {@code double}
	 */
	public static double nextIdAsDouble() {
		return NEXT.incrementAndGet();
	}

	/**
	 * @return the next sequential id rendered as a decimal {@link String}
	 */
	public static String nextIdAsString() {
		// String.valueOf is the idiomatic int-to-String conversion (no "" + n concat)
		return String.valueOf(nextIdAsInt());
	}
}