Compare commits

...

31 Commits

Author SHA1 Message Date
Mark Paluch
afa611ce09
DATAES-862 - Release version 4.0.2 (Neumann SR2). 2020-07-22 10:21:10 +02:00
Mark Paluch
dc9db5dcdc
DATAES-862 - Prepare 4.0.2 (Neumann SR2). 2020-07-22 10:20:45 +02:00
Mark Paluch
4ee592cd21
DATAES-862 - Updated changelog. 2020-07-22 10:20:41 +02:00
Mark Paluch
cd7b6f8420
DATAES-861 - Updated changelog. 2020-07-22 10:08:51 +02:00
Mark Paluch
237c0ead2e
DATAES-860 - Updated changelog. 2020-07-22 09:44:37 +02:00
Peter-Josef Meisch
6462305521 DATAES-883 - Fix log level on resource load error.
Original PR: #493

(cherry picked from commit 0f940b36d7a89257694ed85639f1a89c4eb2a35a)
2020-07-10 21:20:42 +02:00
Peter-Josef Meisch
0a2038505f DATAES-878 - Wrong value for TermVector.
Original PR: #492

(cherry picked from commit df4e6c449d4b5cf7a9196d88045f7b7af9060311)
2020-07-02 06:45:15 +02:00
Mark Paluch
8276023132
DATAES-824 - Updated changelog. 2020-06-25 12:00:26 +02:00
Peter-Josef Meisch
ae94120d91 DATAES-865 - Polishing.
(cherry picked from commit 92f16846abaf7266de1e9669aadd3bd24f5b64a1)
2020-06-16 18:59:16 +02:00
Been24
d2df9e7f4c DATAES-865 - Fix MappingElasticsearchConverter writing an Object property containing a Map.
Original PR: #482

(cherry picked from commit 1de1aeb2c7ec80580cb2b4b1d98b724277862463)
2020-06-16 18:59:03 +02:00
Peter-Josef Meisch
73fc8f65ee DATAES-863 - Improve server error response handling.
Original PR: #480

(cherry picked from commit 3c44a1c96996ff2af496500505a8194e22b3de02)
2020-06-11 19:16:11 +02:00
Mark Paluch
4d2e4ac22c
DATAES-823 - After release cleanups. 2020-06-10 14:29:30 +02:00
Mark Paluch
8d02946186
DATAES-823 - Prepare next development iteration. 2020-06-10 14:29:27 +02:00
Mark Paluch
3ac4e12e08
DATAES-823 - Release version 4.0.1 (Neumann SR1). 2020-06-10 14:02:28 +02:00
Mark Paluch
bb69482b7b
DATAES-823 - Prepare 4.0.1 (Neumann SR1). 2020-06-10 14:02:00 +02:00
Mark Paluch
20f3298f72
DATAES-823 - Updated changelog. 2020-06-10 14:01:56 +02:00
Mark Paluch
3178707172
DATAES-807 - Updated changelog. 2020-06-10 12:29:56 +02:00
Mark Paluch
b60da78c5b
DATAES-806 - Updated changelog. 2020-06-10 11:40:30 +02:00
Peter-Josef Meisch
8e765cf07c DATAES-857 - Registered simple types are not read from list.
Original PR: #478

(cherry picked from commit 407c8c6c17cf13dffcf0c577fe7ea47bd6f96200)
2020-06-09 16:31:14 +02:00
Peter-Josef Meisch
ff999959a8 DATAES-850 - Add warning and docs for missing TemporalAccessor configuration.
Original PR: #472

(cherry picked from commit 859b22db8e396dc533d479dcf49a590c07b8dc24)
2020-05-31 23:06:38 +02:00
Peter-Josef Meisch
333aba2c59 DATAES-845 - MappingElasticsearchConverter handles lists with null values.
Original PR: #470

(cherry picked from commit 852273eff5c06dbd9e1ef4bcd28d2736c482bdf9)
2020-05-29 19:12:24 +02:00
Mark Paluch
e3e646eb72
DATAES-844 - Improve TOC formatting for migration guides. 2020-05-26 16:23:12 +02:00
Peter-Josef Meisch
b918605efd
DATAES-839 - ReactiveElasticsearchTemplate should use RequestFactory.
Original PR: #466

cherrypicked from dc6734db4391f236aeb11600204db28fe570fb34
2020-05-21 12:32:30 +02:00
Peter-Josef Meisch
c9667755f2
DATAES-835 - Fix code sample in documentation for scroll API.
Original PR: #462
2020-05-20 08:43:03 +02:00
Peter-Josef Meisch
421333dadc DATAES-832 - findAllById repository method returns iterable with null elements for not found ids. 2020-05-18 18:05:30 +02:00
Peter-Josef Meisch
34e3dc735c DATAES-832 - findAllById repository method returns iterable with null elements for not found ids. 2020-05-17 20:01:47 +02:00
Peter-Josef Meisch
e7110c14ab DATAES-831 - SearchOperations.searchForStream does not use requested maxResults.
Original PR: #459

(cherry picked from commit 506f79a45aa93ad5787b25d807de5e5970bf0ea3)
2020-05-17 10:53:29 +02:00
Peter-Josef Meisch
1cee4057d9
DATAES-828 - Fields of type date need to have a format defined.
Original PR: #457
2020-05-14 20:30:30 +02:00
Peter-Josef Meisch
68ce0c2184 DATAES-826 - Repositories should not try to create an index when it already exists.
original PR: #456

(cherry picked from commit c7339dc248370e5e726b6a808c74bb5bd4dc1db1)
2020-05-14 18:06:51 +02:00
Mark Paluch
9adfa0b389
DATAES-808 - After release cleanups. 2020-05-12 12:50:35 +02:00
Mark Paluch
d28f643997
DATAES-808 - Prepare next development iteration. 2020-05-12 12:40:53 +02:00
33 changed files with 677 additions and 303 deletions

10
Jenkinsfile vendored
View File

@ -3,7 +3,7 @@ pipeline {
triggers {
pollSCM 'H/10 * * * *'
upstream(upstreamProjects: "spring-data-commons/master", threshold: hudson.model.Result.SUCCESS)
upstream(upstreamProjects: "spring-data-commons/2.3.x", threshold: hudson.model.Result.SUCCESS)
}
options {
@ -15,7 +15,7 @@ pipeline {
stage("test: baseline (jdk8)") {
when {
anyOf {
branch 'master'
branch '4.0.x'
not { triggeredBy 'UpstreamCause' }
}
}
@ -36,7 +36,7 @@ pipeline {
stage("Test other configurations") {
when {
anyOf {
branch 'master'
branch '4.0.x'
not { triggeredBy 'UpstreamCause' }
}
}
@ -76,7 +76,7 @@ pipeline {
stage('Release to artifactory') {
when {
anyOf {
branch 'master'
branch '4.0.x'
not { triggeredBy 'UpstreamCause' }
}
}
@ -107,7 +107,7 @@ pipeline {
}
stage('Publish documentation') {
when {
branch 'master'
branch '4.0.x'
}
agent {
docker {

View File

@ -5,12 +5,12 @@
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-elasticsearch</artifactId>
<version>4.0.0.RELEASE</version>
<version>4.0.2.RELEASE</version>
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>2.3.0.RELEASE</version>
<version>2.3.2.RELEASE</version>
</parent>
<name>Spring Data Elasticsearch</name>
@ -21,7 +21,7 @@
<commonslang>2.6</commonslang>
<elasticsearch>7.6.2</elasticsearch>
<log4j>2.9.1</log4j>
<springdata.commons>2.3.0.RELEASE</springdata.commons>
<springdata.commons>2.3.2.RELEASE</springdata.commons>
<netty>4.1.39.Final</netty>
<java-module-name>spring.data.elasticsearch</java-module-name>
</properties>

View File

@ -44,4 +44,5 @@ include::{spring-data-commons-docs}/repository-namespace-reference.adoc[]
include::{spring-data-commons-docs}/repository-populator-namespace-reference.adoc[]
include::{spring-data-commons-docs}/repository-query-keywords-reference.adoc[]
include::{spring-data-commons-docs}/repository-query-return-types-reference.adoc[]
include::reference/migration-guides.adoc[]
:leveloffset: -1

View File

@ -9,7 +9,6 @@ The Spring Data Elasticsearch project applies core Spring concepts to the develo
You will notice similarities to the Spring data solr and mongodb support in the Spring Framework.
include::reference/elasticsearch-new.adoc[leveloffset=+1]
include::reference/elasticsearch-migration-guide-3.2-4.0.adoc[leveloffset=+1]
[[preface.metadata]]
== Project Metadata

View File

@ -1,25 +1,25 @@
[[elasticsearch-migration-guide-3.2-4.0]]
== Upgrading from 3.2.x to 4.0.x
= Upgrading from 3.2.x to 4.0.x
This section describes breaking changes from version 3.2.x to 4.0.x and how removed features can be replaced by new introduced features.
=== Removal of the used Jackson Mapper.
[[elasticsearch-migration-guide-3.2-4.0.jackson-removal]]
== Removal of the used Jackson Mapper
One of the changes in version 4.0.x is that Spring Data Elasticsearch does not use the Jackson Mapper anymore to map an entity to the JSON representation needed for Elasticsearch (see <<elasticsearch.mapping>>). In version 3.2.x the Jackson Mapper was the default that was used. It was possible to switch to the meta-model based converter (named `ElasticsearchEntityMapper`) by explicitly configuring it (<<elasticsearch.mapping.meta-model>>).
In version 4.0.x the meta-model based converter is the only one that is available and does not need to be configured explicitly. If you had a custom configuration to enable the meta-model converter by providing a bean like this:
[code,java]
[source,java]
----
@Bean
@Override
public EntityMapper entityMapper() {
public EntityMapper entityMapper() {
ElasticsearchEntityMapper entityMapper = new ElasticsearchEntityMapper(
elasticsearchMappingContext(), new DefaultConversionService()
elasticsearchMappingContext(), new DefaultConversionService()
);
entityMapper.setConversions(elasticsearchCustomConversions());
entityMapper.setConversions(elasticsearchCustomConversions());
return entityMapper;
}
@ -30,15 +30,15 @@ You now have to remove this bean, the `ElasticsearchEntityMapper` interface has
.Entity configuration
Some users had custom Jackson annotations on the entity class, for example in order to define a custom name for the mapped document in Elasticsearch or to configure date conversions. These are not taken into account anymore. The needed functionality is now provided with Spring Data Elasticsearch's `@Field` annotation. Please see <<elasticsearch.mapping.meta-model.annotations>> for detailed information.
=== Removal of implicit index name from query objects
[[elasticsearch-migration-guide-3.2-4.0.implicit-index-name]]
== Removal of implicit index name from query objects
In 3.2.x the different query classes like `IndexQuery` or `SearchQuery` had properties that were taking the index name or index names that they were operating upon. If these were not set, the passed in entity was inspected to retrieve the index name that was set in the `@Document` annotation. +
In 4.0.x the index name(s) must now be provided in an additional parameter of type `IndexCoordinates`. By separating this, it now is possible to use one query object against different indices.
So for example the following code:
[code,java]
[source,java]
----
IndexQuery indexQuery = new IndexQueryBuilder()
.withId(person.getId().toString())
@ -50,7 +50,7 @@ String documentId = elasticsearchOperations.index(indexQuery);
must be changed to:
[code,java]
[source,java]
----
IndexCoordinates indexCoordinates = elasticsearchOperations.getIndexCoordinatesFor(person.getClass());
@ -58,14 +58,14 @@ IndexQuery indexQuery = new IndexQueryBuilder()
.withId(person.getId().toString())
.withObject(person)
.build();
String documentId = elasticsearchOperations.index(indexQuery, indexCoordinates);
----
To make it easier to work with entities and use the index name that is contained in the entity's `@Document` annotation, new methods have been added like `DocumentOperations.save(T entity)`;
=== The new Operations interfaces
[[elasticsearch-migration-guide-3.2-4.0.new-operations]]
== The new Operations interfaces
In version 3.2 there was the `ElasticsearchOperations` interface that defined all the methods for the `ElasticsearchTemplate` class. In version 4 the functions have been split into different interfaces, aligning these interfaces with the Elasticsearch API:
@ -77,10 +77,10 @@ In version 3.2 there was the `ElasticsearchOperations` interface that defined al
NOTE: All the functions from the `ElasticsearchOperations` interface in version 3.2 that are now moved to the `IndexOperations` interface are still available, they are marked as deprecated and have default implementations that delegate to the new implementation:
[code,java]
[source,java]
----
/**
* Create an index for given indexName .
* Create an index for given indexName.
*
* @param indexName the name of the index
* @return {@literal true} if the index was created
@ -92,17 +92,17 @@ default boolean createIndex(String indexName) {
}
----
[[elasticsearch-migration-guide-3.2-4.0.deprecations]]
== Deprecations
=== Deprecations
==== Methods and classes
=== Methods and classes
Many functions and classes have been deprecated. These functions still work, but the Javadocs show with what they should be replaced.
.Example from ElasticsearchOperations
[code,java]
[source,java]
----
/**
/*
* Retrieves an object from an index.
*
* @param query the query defining the id of the object to get
@ -113,15 +113,16 @@ Many functions and classes have been deprecated. These functions still work, but
@Deprecated
@Nullable
<T> T queryForObject(GetQuery query, Class<T> clazz);
----
----
==== Elasticsearch deprecations
=== Elasticsearch deprecations
Since version 7 the Elasticsearch `TransportClient` is deprecated, it will be removed with Elasticsearch version 8. Spring Data Elasticsearch deprecates the `ElasticsearchTemplate` class which uses the `TransportClient` in version 4.0.
Mapping types were removed from Elasticsearch 7, they still exist as deprecated values in the Spring Data `@Document` annotation and the `IndexCoordinates` class but they are not used anymore internally.
=== Removals
[[elasticsearch-migration-guide-3.2-4.0.removal]]
== Removals
* As already described, the `ElasticsearchEntityMapper` interface has been removed.
@ -130,4 +131,3 @@ Mapping types were removed from Elasticsearch 7, they still exist as deprecated
* The method `org.springframework.data.elasticsearch.core.ElasticsearchOperations.query(SearchQuery query, ResultsExtractor<T> resultsExtractor);` and the `org.springframework.data.elasticsearch.core.ResultsExtractor` interface have been removed. These could be used to parse the result from Elasticsearch for cases in which the response mapping done with the Jackson based mapper was not enough. Since version 4.0, there are the new <<elasticsearch.operations.searchresulttypes>> to return the information from an Elasticsearch response, so there is no need to expose this low level functionality.
* The low level methods `startScroll`, `continueScroll` and `clearScroll` have been removed from the `ElasticsearchOperations` interface. For low level scroll API access, there now are `searchScrollStart`, `searchScrollContinue` and `searchScrollClear` methods on the `ElasticsearchRestTemplate` class.

View File

@ -35,8 +35,6 @@ IndexCoordinates index = IndexCoordinates.of("sample-index");
SearchQuery searchQuery = new NativeSearchQueryBuilder()
.withQuery(matchAllQuery())
.withIndices(INDEX_NAME)
.withTypes(TYPE_NAME)
.withFields("message")
.withPageable(PageRequest.of(0, 10))
.build();
@ -62,8 +60,6 @@ IndexCoordinates index = IndexCoordinates.of("sample-index");
SearchQuery searchQuery = new NativeSearchQueryBuilder()
.withQuery(matchAllQuery())
.withIndices(INDEX_NAME)
.withTypes(TYPE_NAME)
.withFields("message")
.withPageable(PageRequest.of(0, 10))
.build();

View File

@ -43,11 +43,14 @@ The following annotations are available:
* `@Field`: Applied at the field level and defines properties of the field, most of the attributes map to the respective https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping.html[Elasticsearch Mapping] definitions (the following list is not complete, check the annotation Javadoc for a complete reference):
** `name`: The name of the field as it will be represented in the Elasticsearch document, if not set, the Java field name is used.
** `type`: the field type, can be one of _Text, Keyword, Long, Integer, Short, Byte, Double, Float, Half_Float, Scaled_Float, Date, Date_Nanos, Boolean, Binary, Integer_Range, Float_Range, Long_Range, Double_Range, Date_Range, Ip_Range, Object, Nested, Ip, TokenCount, Percolator, Flattened, Search_As_You_Type_. See https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-types.html[Elasticsearch Mapping Types]
** `format` and `pattern` custom definitions for the _Date_ type.
** `format` and `pattern` definitions for the _Date_ type. `format` must be defined for date types.
** `store`: Flag whether the original field value should be stored in Elasticsearch, default value is _false_.
** `analyzer`, `searchAnalyzer`, `normalizer` for specifying custom analyzers and normalizers.
* `@GeoPoint`: marks a field as _geo_point_ datatype. Can be omitted if the field is an instance of the `GeoPoint` class.
NOTE: Properties that derive from `TemporalAccessor` must either have a `@Field` annotation of type `FieldType.Date` or a custom converter must be registered for this type. +
If you are using a custom date format, you need to use _uuuu_ for the year instead of _yyyy_. This is due to a https://www.elastic.co/guide/en/elasticsearch/reference/current/migrate-to-java-time.html#java-time-migration-incompatible-date-formats[change in Elasticsearch 7].
The mapping metadata infrastructure is defined in a separate spring-data-commons project that is technology agnostic.
[[elasticsearch.mapping.meta-model.rules]]

View File

@ -0,0 +1,6 @@
[[elasticsearch.migration]]
= Appendix E: Migration Guides
:leveloffset: +1
include::elasticsearch-migration-guide-3.2-4.0.adoc[]
:leveloffset: -1

View File

@ -0,0 +1,37 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.elasticsearch;
import org.springframework.dao.DataRetrievalFailureException;
import java.util.Map;
/**
 * Exception that is raised when a bulk operation reports failures for individual documents. The per-document failure
 * messages can be retrieved via {@link #getFailedDocuments()}.
 *
 * @author Peter-Josef Meisch
 * @since 4.0.1 (ported back from master (4.1) branch)
 */
public class BulkFailureException extends DataRetrievalFailureException {

	// maps the id of each failed document to the corresponding failure message
	private final Map<String, String> failuresById;

	/**
	 * @param msg the exception message
	 * @param failedDocuments map from document id to failure message for each failed document
	 */
	public BulkFailureException(String msg, Map<String, String> failedDocuments) {
		super(msg);
		this.failuresById = failedDocuments;
	}

	public Map<String, String> getFailedDocuments() {
		return failuresById;
	}
}

View File

@ -22,6 +22,7 @@ import org.springframework.dao.UncategorizedDataAccessException;
* @since 4.0
*/
public class UncategorizedElasticsearchException extends UncategorizedDataAccessException {
public UncategorizedElasticsearchException(String msg, Throwable cause) {
super(msg, cause);
}

View File

@ -20,5 +20,5 @@ package org.springframework.data.elasticsearch.annotations;
* @since 4.0
*/
public enum TermVector {
none, no, yes, with_positions, with_offsets, woth_positions_offsets, with_positions_payloads, with_positions_offets_payloads
none, no, yes, with_positions, with_offsets, with_positions_offsets, with_positions_payloads, with_positions_offets_payloads
}

View File

@ -804,53 +804,82 @@ public class DefaultReactiveElasticsearchClient implements ReactiveElasticsearch
private <T> Publisher<? extends T> handleServerError(Request request, ClientResponse response) {
RestStatus status = RestStatus.fromCode(response.statusCode().value());
int statusCode = response.statusCode().value();
RestStatus status = RestStatus.fromCode(statusCode);
String mediaType = response.headers().contentType().map(MediaType::toString).orElse(XContentType.JSON.mediaType());
return Mono.error(new ElasticsearchStatusException(String.format("%s request to %s returned error code %s.",
request.getMethod(), request.getEndpoint(), response.statusCode().value()), status));
return response.body(BodyExtractors.toMono(byte[].class)) //
.map(bytes -> new String(bytes, StandardCharsets.UTF_8)) //
.flatMap(content -> contentOrError(content, mediaType, status))
.flatMap(unused -> Mono
.error(new ElasticsearchStatusException(String.format("%s request to %s returned error code %s.",
request.getMethod(), request.getEndpoint(), statusCode), status)));
}
private <T> Publisher<? extends T> handleClientError(String logId, Request request, ClientResponse response,
Class<T> responseType) {
int statusCode = response.statusCode().value();
RestStatus status = RestStatus.fromCode(statusCode);
String mediaType = response.headers().contentType().map(MediaType::toString).orElse(XContentType.JSON.mediaType());
return response.body(BodyExtractors.toMono(byte[].class)) //
.map(bytes -> new String(bytes, StandardCharsets.UTF_8)) //
.flatMap(content -> {
String mediaType = response.headers().contentType().map(MediaType::toString)
.orElse(XContentType.JSON.mediaType());
RestStatus status = RestStatus.fromCode(response.statusCode().value());
try {
ElasticsearchException exception = getElasticsearchException(response, content, mediaType);
if (exception != null) {
StringBuilder sb = new StringBuilder();
buildExceptionMessages(sb, exception);
return Mono.error(new ElasticsearchStatusException(sb.toString(), status, exception));
}
} catch (Exception e) {
return Mono.error(new ElasticsearchStatusException(content, status));
}
return Mono.just(content);
}).doOnNext(it -> ClientLogger.logResponse(logId, response.statusCode(), it)) //
.flatMap(content -> contentOrError(content, mediaType, status)) //
.doOnNext(content -> ClientLogger.logResponse(logId, response.statusCode(), content)) //
.flatMap(content -> doDecode(response, responseType, content));
}
// region ElasticsearchException helper
/**
* checks if the given content body contains an {@link ElasticsearchException}, if yes it is returned in a Mono.error.
* Otherwise the content is returned in the Mono
*
* @param content the content to analyze
* @param mediaType the returned media type
* @param status the response status
* @return a Mono with the content or an Mono.error
*/
private static Mono<String> contentOrError(String content, String mediaType, RestStatus status) {
ElasticsearchException exception = getElasticsearchException(content, mediaType, status);
if (exception != null) {
StringBuilder sb = new StringBuilder();
buildExceptionMessages(sb, exception);
return Mono.error(new ElasticsearchStatusException(sb.toString(), status, exception));
}
return Mono.just(content);
}
/**
* tries to parse an {@link ElasticsearchException} from the given body content
*
* @param content the content to analyse
* @param mediaType the type of the body content
* @return an {@link ElasticsearchException} or {@literal null}.
*/
@Nullable
private ElasticsearchException getElasticsearchException(ClientResponse response, String content, String mediaType)
throws IOException {
private static ElasticsearchException getElasticsearchException(String content, String mediaType, RestStatus status) {
XContentParser parser = createParser(mediaType, content);
// we have a JSON object with an error and a status field
XContentParser.Token token = parser.nextToken(); // Skip START_OBJECT
try {
XContentParser parser = createParser(mediaType, content);
// we have a JSON object with an error and a status field
XContentParser.Token token = parser.nextToken(); // Skip START_OBJECT
do {
token = parser.nextToken();
do {
token = parser.nextToken();
if (parser.currentName().equals("error")) {
return ElasticsearchException.failureFromXContent(parser);
}
} while (token == XContentParser.Token.FIELD_NAME);
return null;
if (parser.currentName().equals("error")) {
return ElasticsearchException.failureFromXContent(parser);
}
} while (token == XContentParser.Token.FIELD_NAME);
return null;
} catch (IOException e) {
return new ElasticsearchStatusException(content, status);
}
}
private static void buildExceptionMessages(StringBuilder sb, Throwable t) {

View File

@ -36,7 +36,7 @@ import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.data.convert.EntityReader;
import org.springframework.data.elasticsearch.ElasticsearchException;
import org.springframework.data.elasticsearch.BulkFailureException;
import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter;
import org.springframework.data.elasticsearch.core.convert.MappingElasticsearchConverter;
import org.springframework.data.elasticsearch.core.document.Document;
@ -258,7 +258,11 @@ public abstract class AbstractElasticsearchTemplate implements ElasticsearchOper
long scrollTimeInMillis = TimeValue.timeValueMinutes(1).millis();
// noinspection ConstantConditions
int maxCount = query.isLimiting() ? query.getMaxResults() : 0;
return StreamQueries.streamResults( //
maxCount, //
searchScrollStart(scrollTimeInMillis, query, clazz, index), //
scrollId -> searchScrollContinue(scrollId, scrollTimeInMillis, clazz, index), //
this::searchScrollClear);
@ -401,7 +405,7 @@ public abstract class AbstractElasticsearchTemplate implements ElasticsearchOper
if (item.isFailed())
failedDocuments.put(item.getId(), item.getFailureMessage());
}
throw new ElasticsearchException(
throw new BulkFailureException(
"Bulk operation has failures. Use ElasticsearchException.getFailedDocuments() for detailed messages ["
+ failedDocuments + ']',
failedDocuments);

View File

@ -40,6 +40,8 @@ import org.elasticsearch.index.reindex.DeleteByQueryRequest;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter;
import org.springframework.data.elasticsearch.core.document.DocumentAdapters;
import org.springframework.data.elasticsearch.core.document.SearchDocumentResponse;
@ -88,6 +90,8 @@ import org.springframework.util.Assert;
*/
public class ElasticsearchRestTemplate extends AbstractElasticsearchTemplate {
private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchRestTemplate.class);
private RestHighLevelClient client;
private ElasticsearchExceptionTranslator exceptionTranslator;
@ -206,7 +210,7 @@ public class ElasticsearchRestTemplate extends AbstractElasticsearchTemplate {
Assert.notNull(id, "id must not be null");
Assert.notNull(index, "index must not be null");
DeleteRequest request = new DeleteRequest(index.getIndexName(), elasticsearchConverter.convertId(id));
DeleteRequest request = requestFactory.deleteRequest(elasticsearchConverter.convertId(id), index);
return execute(client -> client.delete(request, RequestOptions.DEFAULT).getId());
}
@ -300,9 +304,13 @@ public class ElasticsearchRestTemplate extends AbstractElasticsearchTemplate {
@Override
public void searchScrollClear(List<String> scrollIds) {
ClearScrollRequest request = new ClearScrollRequest();
request.scrollIds(scrollIds);
execute(client -> client.clearScroll(request, RequestOptions.DEFAULT));
try {
ClearScrollRequest request = new ClearScrollRequest();
request.scrollIds(scrollIds);
execute(client -> client.clearScroll(request, RequestOptions.DEFAULT));
} catch (Exception e) {
LOGGER.warn("Could not clear scroll: {}", e.getMessage());
}
}
@Override

View File

@ -86,6 +86,7 @@ import org.springframework.util.Assert;
public class ElasticsearchTemplate extends AbstractElasticsearchTemplate {
private static final Logger QUERY_LOGGER = LoggerFactory
.getLogger("org.springframework.data.elasticsearch.core.QUERY");
private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchTemplate.class);
private Client client;
@Nullable private String searchTimeout;
@ -322,7 +323,11 @@ public class ElasticsearchTemplate extends AbstractElasticsearchTemplate {
@Override
public void searchScrollClear(List<String> scrollIds) {
client.prepareClearScroll().setScrollIds(scrollIds).execute().actionGet();
try {
client.prepareClearScroll().setScrollIds(scrollIds).execute().actionGet();
} catch (Exception e) {
LOGGER.warn("Could not clear scroll: {}", e.getMessage());
}
}
@Override

View File

@ -15,15 +15,11 @@
*/
package org.springframework.data.elasticsearch.core;
import static org.elasticsearch.index.VersionType.*;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.util.function.Tuple2;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -41,19 +37,10 @@ import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
import org.elasticsearch.client.Requests;
import org.elasticsearch.client.core.CountRequest;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.WrapperQueryBuilder;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.index.reindex.DeleteByQueryRequest;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.reactivestreams.Publisher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -61,9 +48,9 @@ import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.data.convert.EntityReader;
import org.springframework.data.domain.Sort;
import org.springframework.data.elasticsearch.ElasticsearchException;
import org.springframework.data.elasticsearch.BulkFailureException;
import org.springframework.data.elasticsearch.NoSuchIndexException;
import org.springframework.data.elasticsearch.UncategorizedElasticsearchException;
import org.springframework.data.elasticsearch.client.reactive.ReactiveElasticsearchClient;
import org.springframework.data.elasticsearch.core.EntityOperations.AdaptibleEntity;
import org.springframework.data.elasticsearch.core.EntityOperations.Entity;
@ -82,10 +69,8 @@ import org.springframework.data.elasticsearch.core.mapping.SimpleElasticsearchMa
import org.springframework.data.elasticsearch.core.query.BulkOptions;
import org.springframework.data.elasticsearch.core.query.CriteriaQuery;
import org.springframework.data.elasticsearch.core.query.IndexQuery;
import org.springframework.data.elasticsearch.core.query.NativeSearchQuery;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.data.elasticsearch.core.query.SeqNoPrimaryTerm;
import org.springframework.data.elasticsearch.core.query.StringQuery;
import org.springframework.data.elasticsearch.core.query.UpdateQuery;
import org.springframework.data.elasticsearch.support.VersionInfo;
import org.springframework.data.mapping.callback.ReactiveEntityCallbacks;
@ -194,6 +179,7 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
T savedEntity = it.getT1();
IndexResponse indexResponse = it.getT2();
AdaptibleEntity<T> adaptableEntity = operations.forEntity(savedEntity, converter.getConversionService());
// noinspection ReactiveStreamsNullableInLambdaInTransform
return adaptableEntity.populateIdIfNecessary(indexResponse.getId());
}).flatMap(saved -> maybeCallAfterSave(saved, index));
}
@ -268,7 +254,8 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
protected Flux<BulkItemResponse> doBulkOperation(List<?> queries, BulkOptions bulkOptions, IndexCoordinates index) {
BulkRequest bulkRequest = prepareWriteRequest(requestFactory.bulkRequest(queries, bulkOptions, index));
return client.bulk(bulkRequest) //
.onErrorMap(e -> new ElasticsearchException("Error while bulk for request: " + bulkRequest.toString(), e)) //
.onErrorMap(
e -> new UncategorizedElasticsearchException("Error while bulk for request: " + bulkRequest.toString(), e)) //
.flatMap(this::checkForBulkOperationFailure) //
.flatMapMany(response -> Flux.fromArray(response.getItems()));
}
@ -283,7 +270,7 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
failedDocuments.put(item.getId(), item.getFailureMessage());
}
}
ElasticsearchException exception = new ElasticsearchException(
BulkFailureException exception = new BulkFailureException(
"Bulk operation has failures. Use ElasticsearchException.getFailedDocuments() for detailed messages ["
+ failedDocuments + ']',
failedDocuments);
@ -315,9 +302,8 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
return doExists(id, index);
}
private Mono<Boolean> doExists(String id, @Nullable IndexCoordinates index) {
return Mono.defer(() -> doExists(new GetRequest(index.getIndexName(), id)));
private Mono<Boolean> doExists(String id, IndexCoordinates index) {
return Mono.defer(() -> doExists(requestFactory.getRequest(id, index)));
}
/**
@ -334,27 +320,30 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
private <T> Mono<Tuple2<T, IndexResponse>> doIndex(T entity, IndexCoordinates index) {
AdaptibleEntity<?> adaptibleEntity = operations.forEntity(entity, converter.getConversionService());
IndexRequest request = getIndexRequest(entity, adaptibleEntity, index);
IndexRequest request = requestFactory.indexRequest(getIndexQuery(entity), index);
request = prepareIndexRequest(entity, request);
return Mono.just(entity).zipWith(doIndex(request));
}
private IndexRequest getIndexRequest(Object value, AdaptibleEntity<?> entity, IndexCoordinates index) {
private IndexQuery getIndexQuery(Object value) {
AdaptibleEntity<?> entity = operations.forEntity(value, converter.getConversionService());
Object id = entity.getId();
IndexQuery query = new IndexQuery();
IndexRequest request = id != null ? new IndexRequest(index.getIndexName()).id(converter.convertId(id))
: new IndexRequest(index.getIndexName());
request.source(converter.mapObject(value).toJson(), Requests.INDEX_CONTENT_TYPE);
if (id != null) {
query.setId(id.toString());
}
query.setObject(value);
boolean usingSeqNo = false;
if (entity.hasSeqNoPrimaryTerm()) {
SeqNoPrimaryTerm seqNoPrimaryTerm = entity.getSeqNoPrimaryTerm();
if (seqNoPrimaryTerm != null) {
request.setIfSeqNo(seqNoPrimaryTerm.getSequenceNumber());
request.setIfPrimaryTerm(seqNoPrimaryTerm.getPrimaryTerm());
query.setSeqNo(seqNoPrimaryTerm.getSequenceNumber());
query.setPrimaryTerm(seqNoPrimaryTerm.getPrimaryTerm());
usingSeqNo = true;
}
}
@ -364,32 +353,11 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
Number version = entity.getVersion();
if (version != null) {
request.version(version.longValue());
request.versionType(EXTERNAL);
}
}
return request;
}
/**
 * Builds an {@link IndexQuery} for the given entity: the entity itself is set as the query object, its id
 * (if present) is used as the query id, and for versioned entities a non-null version value is carried over.
 *
 * @param value the entity to create the index query for
 * @return the populated {@link IndexQuery}
 */
private IndexQuery getIndexQuery(Object value) {

	AdaptibleEntity<?> adaptibleEntity = operations.forEntity(value, converter.getConversionService());

	IndexQuery indexQuery = new IndexQuery();

	Object entityId = adaptibleEntity.getId();
	if (entityId != null) {
		indexQuery.setId(entityId.toString());
	}
	indexQuery.setObject(value);

	if (adaptibleEntity.isVersionedEntity()) {
		Number entityVersion = adaptibleEntity.getVersion();
		if (entityVersion != null) {
			indexQuery.setVersion(entityVersion.longValue());
		}
	}
	return indexQuery;
}
@ -410,9 +378,7 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
}
private Mono<GetResult> doGet(String id, ElasticsearchPersistentEntity<?> entity, IndexCoordinates index) {
return Mono.defer(() -> {
return doGet(new GetRequest(index.getIndexName(), id));
});
return Mono.defer(() -> doGet(requestFactory.getRequest(id, index)));
}
/**
@ -465,8 +431,8 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
private Mono<String> doDeleteById(String id, IndexCoordinates index) {
return Mono.defer(() -> {
return doDelete(prepareDeleteRequest(new DeleteRequest(index.getIndexName(), id)));
DeleteRequest request = requestFactory.deleteRequest(id, index);
return doDelete(prepareDeleteRequest(request));
});
}
@ -479,8 +445,7 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
Assert.notNull(query, "Query must not be null!");
return doDeleteBy(query, getPersistentEntityFor(entityType), index).map(BulkByScrollResponse::getDeleted)
.publishNext();
return doDeleteBy(query, entityType, index).map(BulkByScrollResponse::getDeleted).publishNext();
}
@Override
@ -488,13 +453,10 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
return delete(query, entityType, getIndexCoordinatesFor(entityType));
}
private Flux<BulkByScrollResponse> doDeleteBy(Query query, ElasticsearchPersistentEntity<?> entity,
IndexCoordinates index) {
private Flux<BulkByScrollResponse> doDeleteBy(Query query, Class<?> entityType, IndexCoordinates index) {
return Flux.defer(() -> {
DeleteByQueryRequest request = new DeleteByQueryRequest(index.getIndexNames());
request.setQuery(mappedQuery(query, entity));
DeleteByQueryRequest request = requestFactory.deleteByQueryRequest(query, entityType, index);
return doDeleteBy(prepareDeleteByRequest(request));
});
}
@ -552,8 +514,13 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
*/
protected DeleteByQueryRequest prepareDeleteByRequest(DeleteByQueryRequest request) {
if (refreshPolicy != null && !RefreshPolicy.NONE.equals(refreshPolicy)) {
request = request.setRefresh(true);
if (refreshPolicy != null) {
if (RefreshPolicy.NONE.equals(refreshPolicy)) {
request = request.setRefresh(false);
} else {
request = request.setRefresh(true);
}
}
if (indicesOptions != null) {
@ -661,43 +628,6 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
});
}
/**
 * Creates a {@link CountRequest} for the given query. The query is mapped to an Elasticsearch
 * {@link QueryBuilder}; track-scores, an optional post filter, source filtering, field collapsing,
 * sort, min score, indices options and preference are applied when present on the query.
 *
 * @param query the query to build the request from
 * @param entity the persistent entity used to map criteria and sort field names
 * @param index provides the index names the request is executed against
 * @return the prepared {@link CountRequest}
 */
private CountRequest buildCountRequest(Query query, ElasticsearchPersistentEntity<?> entity, IndexCoordinates index) {

	CountRequest request = new CountRequest(index.getIndexNames());
	SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
	searchSourceBuilder.query(mappedQuery(query, entity));
	searchSourceBuilder.trackScores(query.getTrackScores());

	// a filter query is only present on NativeSearchQuery instances and is applied as post filter
	QueryBuilder postFilterQuery = mappedFilterQuery(query, entity);
	if (postFilterQuery != null) {
		searchSourceBuilder.postFilter(postFilterQuery);
	}

	if (query.getSourceFilter() != null) {
		searchSourceBuilder.fetchSource(query.getSourceFilter().getIncludes(), query.getSourceFilter().getExcludes());
	}

	// field collapsing is a NativeSearchQuery-only feature
	if (query instanceof NativeSearchQuery && ((NativeSearchQuery) query).getCollapseBuilder() != null) {
		searchSourceBuilder.collapse(((NativeSearchQuery) query).getCollapseBuilder());
	}

	sort(query, entity).forEach(searchSourceBuilder::sort);

	if (query.getMinScore() > 0) {
		searchSourceBuilder.minScore(query.getMinScore());
	}

	// indices options and preference are set on the request itself, not on the source builder
	if (query.getIndicesOptions() != null) {
		request.indicesOptions(query.getIndicesOptions());
	}

	if (query.getPreference() != null) {
		request.preference(query.getPreference());
	}
	request.source(searchSourceBuilder);
	return request;
}
/**
* Customization hook on the actual execution result {@link Publisher}. <br />
*
@ -762,61 +692,6 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
.map(DocumentAdapters::from).onErrorResume(NoSuchIndexException.class, it -> Mono.empty());
}
/**
 * Returns the filter of a {@link NativeSearchQuery} to be used as a post filter.
 *
 * @return the query's filter, or {@literal null} if the query is not a {@link NativeSearchQuery}
 */
@Nullable
private QueryBuilder mappedFilterQuery(Query query, ElasticsearchPersistentEntity<?> entity) {
	return (query instanceof NativeSearchQuery) ? ((NativeSearchQuery) query).getFilter() : null;
}
/**
 * Maps a Spring Data {@link Query} to an Elasticsearch {@link QueryBuilder}. {@link CriteriaQuery} instances
 * are first updated via the converter; a mapping to {@literal null} falls back to a match-all query.
 *
 * @throws IllegalArgumentException if the query type is not one of CriteriaQuery, StringQuery or
 *           NativeSearchQuery
 */
private QueryBuilder mappedQuery(Query query, ElasticsearchPersistentEntity<?> entity) {

	QueryBuilder queryBuilder;

	if (query instanceof CriteriaQuery) {
		CriteriaQuery criteriaQuery = (CriteriaQuery) query;
		converter.updateQuery(criteriaQuery, entity.getType());
		queryBuilder = new CriteriaQueryProcessor().createQueryFromCriteria(criteriaQuery.getCriteria());
	} else if (query instanceof StringQuery) {
		queryBuilder = new WrapperQueryBuilder(((StringQuery) query).getSource());
	} else if (query instanceof NativeSearchQuery) {
		queryBuilder = ((NativeSearchQuery) query).getQuery();
	} else {
		throw new IllegalArgumentException(String.format("Unknown query type '%s'.", query.getClass()));
	}

	return (queryBuilder != null) ? queryBuilder : QueryBuilders.matchAllQuery();
}
/**
 * Maps the {@link Sort} of the given query to Elasticsearch {@link FieldSortBuilder}s, resolving property
 * names to field names via the persistent entity and translating null handling to the {@code missing}
 * setting ({@code _first} / {@code _last}).
 *
 * @return the mapped sort builders, an empty list if the query defines no sort
 */
private static List<FieldSortBuilder> sort(Query query, ElasticsearchPersistentEntity<?> entity) {

	if (query.getSort() == null || query.getSort().isUnsorted()) {
		return Collections.emptyList();
	}

	List<FieldSortBuilder> sortBuilders = new ArrayList<>();

	for (Sort.Order order : query.getSort()) {

		// fall back to the raw property name if it cannot be resolved against the entity
		ElasticsearchPersistentProperty persistentProperty = entity.getPersistentProperty(order.getProperty());
		String fieldName = (persistentProperty != null) ? persistentProperty.getFieldName() : order.getProperty();

		SortOrder sortOrder = order.getDirection().isDescending() ? SortOrder.DESC : SortOrder.ASC;
		FieldSortBuilder fieldSort = SortBuilders.fieldSort(fieldName).order(sortOrder);

		switch (order.getNullHandling()) {
			case NULLS_FIRST:
				fieldSort.missing("_first");
				break;
			case NULLS_LAST:
				fieldSort.missing("_last");
				break;
			default:
				break;
		}

		sortBuilders.add(fieldSort);
	}
	return sortBuilders;
}
/**
* Customization hook to modify a generated {@link SearchRequest} prior to its execution. Eg. by setting the
* {@link SearchRequest#indicesOptions(IndicesOptions) indices options} if applicable.
@ -950,7 +825,6 @@ public class ReactiveElasticsearchTemplate implements ReactiveElasticsearchOpera
return Mono.just(entity);
}
// endregion
protected interface DocumentCallback<T> {

View File

@ -29,6 +29,7 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequestBuilder;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetRequestBuilder;
import org.elasticsearch.action.get.MultiGetRequest;
@ -249,6 +250,10 @@ class RequestFactory {
return deleteByQueryRequest;
}
/**
 * Creates a {@link DeleteRequest} for the document with the given id in the given index.
 *
 * @param id the id of the document to delete
 * @param index the coordinates of the index containing the document
 * @return the created request
 */
public DeleteRequest deleteRequest(String id, IndexCoordinates index) {
	String indexName = index.getIndexName();
	return new DeleteRequest(indexName, id);
}
@Deprecated
public DeleteByQueryRequestBuilder deleteByQueryRequestBuilder(Client client, DeleteQuery deleteQuery,
IndexCoordinates index) {
@ -344,6 +349,7 @@ class RequestFactory {
throw new ElasticsearchException(
"object or source is null, failed to index the document [id: " + query.getId() + ']');
}
if (query.getVersion() != null) {
indexRequest.version(query.getVersion());
VersionType versionType = retrieveVersionTypeFromPersistentEntity(query.getObject().getClass());
@ -353,6 +359,7 @@ class RequestFactory {
if (query.getSeqNo() != null) {
indexRequest.setIfSeqNo(query.getSeqNo());
}
if (query.getPrimaryTerm() != null) {
indexRequest.setIfPrimaryTerm(query.getPrimaryTerm());
}

View File

@ -38,8 +38,8 @@ public abstract class ResourceUtil {
/**
* Read a {@link ClassPathResource} into a {@link String}.
*
* @param url
* @return
* @param url url the file url
* @return the contents of the file or null if it could not be read
*/
@Nullable
public static String readFileFromClasspath(String url) {
@ -48,7 +48,7 @@ public abstract class ResourceUtil {
try (InputStream is = classPathResource.getInputStream()) {
return StreamUtils.copyToString(is, Charset.defaultCharset());
} catch (Exception e) {
LOGGER.debug(String.format("Failed to load file from url: %s: %s", url, e.getMessage()));
LOGGER.warn(String.format("Failed to load file from url: %s: %s", url, e.getMessage()));
return null;
}
}

View File

@ -18,6 +18,7 @@ package org.springframework.data.elasticsearch.core;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import java.util.function.Function;
@ -38,13 +39,15 @@ abstract class StreamQueries {
/**
* Stream query results using {@link SearchScrollHits}.
*
* @param maxCount the maximum number of entities to return, a value of 0 means that all available entities are
* returned
* @param searchHits the initial hits
* @param continueScrollFunction function to continue scrolling applies to the current scrollId.
* @param clearScrollConsumer consumer to clear the scroll context by accepting the scrollIds to clear.
* @param <T>
* @param <T> the entity type
* @return the {@link SearchHitsIterator}.
*/
static <T> SearchHitsIterator<T> streamResults(SearchScrollHits<T> searchHits,
static <T> SearchHitsIterator<T> streamResults(int maxCount, SearchScrollHits<T> searchHits,
Function<String, SearchScrollHits<T>> continueScrollFunction, Consumer<List<String>> clearScrollConsumer) {
Assert.notNull(searchHits, "searchHits must not be null.");
@ -59,20 +62,14 @@ abstract class StreamQueries {
return new SearchHitsIterator<T>() {
// As we couldn't retrieve single result with scroll, store current hits.
private volatile Iterator<SearchHit<T>> scrollHits = searchHits.iterator();
private volatile boolean continueScroll = scrollHits.hasNext();
private volatile AtomicInteger currentCount = new AtomicInteger();
private volatile Iterator<SearchHit<T>> currentScrollHits = searchHits.iterator();
private volatile boolean continueScroll = currentScrollHits.hasNext();
private volatile ScrollState scrollState = new ScrollState(searchHits.getScrollId());
@Override
public void close() {
try {
clearScrollConsumer.accept(scrollState.getScrollIds());
} finally {
scrollHits = null;
scrollState = null;
}
clearScrollConsumer.accept(scrollState.getScrollIds());
}
@Override
@ -99,24 +96,25 @@ abstract class StreamQueries {
@Override
public boolean hasNext() {
if (!continueScroll) {
if (!continueScroll || (maxCount > 0 && currentCount.get() >= maxCount)) {
return false;
}
if (!scrollHits.hasNext()) {
if (!currentScrollHits.hasNext()) {
SearchScrollHits<T> nextPage = continueScrollFunction.apply(scrollState.getScrollId());
scrollHits = nextPage.iterator();
currentScrollHits = nextPage.iterator();
scrollState.updateScrollId(nextPage.getScrollId());
continueScroll = scrollHits.hasNext();
continueScroll = currentScrollHits.hasNext();
}
return scrollHits.hasNext();
return currentScrollHits.hasNext();
}
@Override
public SearchHit<T> next() {
if (hasNext()) {
return scrollHits.next();
currentCount.incrementAndGet();
return currentScrollHits.next();
}
throw new NoSuchElementException();
}

View File

@ -15,6 +15,7 @@
*/
package org.springframework.data.elasticsearch.core.convert;
import java.time.temporal.TemporalAccessor;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@ -26,7 +27,10 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.ApplicationContext;
@ -77,6 +81,8 @@ import org.springframework.util.ObjectUtils;
public class MappingElasticsearchConverter
implements ElasticsearchConverter, ApplicationContextAware, InitializingBean {
private static final Logger LOGGER = LoggerFactory.getLogger(MappingElasticsearchConverter.class);
private final MappingContext<? extends ElasticsearchPersistentEntity<?>, ElasticsearchPersistentProperty> mappingContext;
private final GenericConversionService conversionService;
@ -85,6 +91,8 @@ public class MappingElasticsearchConverter
private ElasticsearchTypeMapper typeMapper;
private ConcurrentHashMap<String, Integer> propertyWarnings = new ConcurrentHashMap<>();
public MappingElasticsearchConverter(
MappingContext<? extends ElasticsearchPersistentEntity<?>, ElasticsearchPersistentProperty> mappingContext) {
this(mappingContext, null);
@ -267,11 +275,26 @@ public class MappingElasticsearchConverter
return null;
}
Class<R> rawType = targetType.getType();
if (property.hasPropertyConverter() && String.class.isAssignableFrom(source.getClass())) {
source = property.getPropertyConverter().read((String) source);
} else if (TemporalAccessor.class.isAssignableFrom(property.getType())
&& !conversions.hasCustomReadTarget(source.getClass(), rawType)) {
// log at most 5 times
String propertyName = property.getOwner().getType().getSimpleName() + '.' + property.getName();
String key = propertyName + "-read";
int count = propertyWarnings.computeIfAbsent(key, k -> 0);
if (count < 5) {
LOGGER.warn(
"Type {} of property {} is a TemporalAccessor class but has neither a @Field annotation defining the date type nor a registered converter for reading!"
+ " It cannot be mapped from a complex object in Elasticsearch!",
property.getType().getSimpleName(), propertyName);
propertyWarnings.put(key, count + 1);
}
}
Class<R> rawType = targetType.getType();
if (conversions.hasCustomReadTarget(source.getClass(), rawType)) {
return rawType.cast(conversionService.convert(source, rawType));
} else if (source instanceof List) {
@ -293,14 +316,17 @@ public class MappingElasticsearchConverter
}
Collection<Object> target = createCollectionForValue(targetType, source.size());
TypeInformation<?> componentType = targetType.getComponentType();
for (Object value : source) {
if (value == null) {
target.add(null);
} else if (componentType != null && !ClassTypeInformation.OBJECT.equals(componentType)
&& isSimpleType(componentType.getType())) {
target.add(readSimpleValue(value, componentType));
} else if (isSimpleType(value)) {
target.add(
readSimpleValue(value, targetType.getComponentType() != null ? targetType.getComponentType() : targetType));
target.add(readSimpleValue(value, componentType != null ? componentType : targetType));
} else {
if (value instanceof List) {
@ -473,6 +499,20 @@ public class MappingElasticsearchConverter
if (property.hasPropertyConverter()) {
ElasticsearchPersistentPropertyConverter propertyConverter = property.getPropertyConverter();
value = propertyConverter.write(value);
} else if (TemporalAccessor.class.isAssignableFrom(property.getType())
&& !conversions.hasCustomWriteTarget(value.getClass())) {
// log at most 5 times
String propertyName = entity.getType().getSimpleName() + '.' + property.getName();
String key = propertyName + "-write";
int count = propertyWarnings.computeIfAbsent(key, k -> 0);
if (count < 5) {
LOGGER.warn(
"Type {} of property {} is a TemporalAccessor class but has neither a @Field annotation defining the date type nor a registered converter for writing!"
+ " It will be mapped to a complex object in Elasticsearch!",
property.getType().getSimpleName(), propertyName);
propertyWarnings.put(key, count + 1);
}
}
if (!isSimpleType(value)) {
@ -556,7 +596,9 @@ public class MappingElasticsearchConverter
Map<Object, Object> target = new LinkedHashMap<>();
Streamable<Entry<String, Object>> mapSource = Streamable.of(value.entrySet());
if (!typeHint.getActualType().getType().equals(Object.class)
TypeInformation<?> actualType = typeHint.getActualType();
if (actualType != null && !actualType.getType().equals(Object.class)
&& isSimpleType(typeHint.getMapValueType().getType())) {
mapSource.forEach(it -> {
@ -595,8 +637,14 @@ public class MappingElasticsearchConverter
: Streamable.of(ObjectUtils.toObjectArray(value));
List<Object> target = new ArrayList<>();
if (!typeHint.getActualType().getType().equals(Object.class) && isSimpleType(typeHint.getActualType().getType())) {
collectionSource.map(this::getWriteSimpleValue).forEach(target::add);
TypeInformation<?> actualType = typeHint.getActualType();
Class<?> type = actualType != null ? actualType.getType() : null;
if (type != null && !type.equals(Object.class) && isSimpleType(type)) {
// noinspection ReturnOfNull
collectionSource //
.map(element -> element != null ? getWriteSimpleValue(element) : null) //
.forEach(target::add);
} else {
collectionSource.map(it -> {
@ -670,10 +718,6 @@ public class MappingElasticsearchConverter
/**
* Compute the type to use by checking the given entity against the store type;
*
* @param entity
* @param source
* @return
*/
private ElasticsearchPersistentEntity<?> computeClosestEntity(ElasticsearchPersistentEntity<?> entity,
Map<String, Object> source) {

View File

@ -17,6 +17,8 @@ package org.springframework.data.elasticsearch.core.geo;
import org.springframework.data.geo.Point;
import java.util.Objects;
/**
* geo-location used for {@link org.springframework.data.elasticsearch.core.query.Criteria}.
*
@ -60,6 +62,20 @@ public class GeoPoint {
return new Point(point.getLat(), point.getLon());
}
/**
 * Two {@code GeoPoint}s are equal when they have the same class and their lat and lon values compare
 * equal via {@link Double#compare(double, double)}.
 */
@Override
public boolean equals(Object o) {

	if (o == this) {
		return true;
	}
	if (o == null || o.getClass() != getClass()) {
		return false;
	}

	GeoPoint other = (GeoPoint) o;
	return Double.compare(other.lat, lat) == 0 && Double.compare(other.lon, lon) == 0;
}
/**
 * Hash code derived from lat and lon, consistent with {@code equals}.
 */
@Override
public int hashCode() {
	return Objects.hash(lat, lon);
}
@Override
public String toString() {
return "GeoPoint{" +

View File

@ -117,9 +117,16 @@ public class SimpleElasticsearchPersistentProperty extends
boolean isTemporalAccessor = TemporalAccessor.class.isAssignableFrom(getType());
boolean isDate = Date.class.isAssignableFrom(getType());
if (field != null && field.type() == FieldType.Date && (isTemporalAccessor || isDate)) {
if (field != null && (field.type() == FieldType.Date || field.type() == FieldType.Date_Nanos)
&& (isTemporalAccessor || isDate)) {
DateFormat dateFormat = field.format();
if (dateFormat == DateFormat.none) {
throw new MappingException(
String.format("Property %s is annotated with FieldType.%s but has no DateFormat defined",
getOwner().getType().getSimpleName() + "." + getName(), field.type().name()));
}
ElasticsearchDateConverter converter = null;
if (dateFormat == DateFormat.custom) {

View File

@ -22,6 +22,7 @@ import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;
@ -88,7 +89,7 @@ public abstract class AbstractElasticsearchRepository<T, ID> implements Elastics
this.entityClass = this.entityInformation.getJavaType();
this.indexOperations = operations.indexOps(this.entityClass);
try {
if (createIndexAndMapping()) {
if (createIndexAndMapping() && !indexOperations.exists()) {
createIndex();
putMapping();
}
@ -153,9 +154,21 @@ public abstract class AbstractElasticsearchRepository<T, ID> implements Elastics
@Override
public Iterable<T> findAllById(Iterable<ID> ids) {
Assert.notNull(ids, "ids can't be null.");
NativeSearchQuery query = new NativeSearchQueryBuilder().withIds(stringIdsRepresentation(ids)).build();
return operations.multiGet(query, getEntityClass(), getIndexCoordinates());
List<T> result = new ArrayList<>();
List<T> multiGetEntities = operations.multiGet(query, getEntityClass(), getIndexCoordinates());
multiGetEntities.forEach(entity -> {
if (entity != null) {
result.add(entity);
}
});
return result;
}
@Override

View File

@ -1,6 +1,93 @@
Spring Data Elasticsearch Changelog
===================================
Changes in version 4.0.2.RELEASE (2020-07-22)
---------------------------------------------
* DATAES-883 - Fix log level on resource load error.
* DATAES-878 - Wrong value for TermVector(woth_positions_offsets).
* DATAES-865 - Fix MappingElasticsearchConverter writing an Object property containing a Map.
* DATAES-863 - Improve server error response handling.
* DATAES-862 - Release 4.0.2 (Neumann SR2).
Changes in version 3.2.9.RELEASE (2020-07-22)
---------------------------------------------
* DATAES-861 - Release 3.2.9 (Moore SR9).
Changes in version 3.1.19.RELEASE (2020-07-22)
----------------------------------------------
* DATAES-860 - Release 3.1.19 (Lovelace SR19).
Changes in version 4.1.0-M1 (2020-06-25)
----------------------------------------
* DATAES-870 - Workaround for reactor-netty error.
* DATAES-868 - Upgrade to Netty 4.1.50.Final.
* DATAES-867 - Adopt to changes in Reactor Netty 1.0.
* DATAES-866 - Implement suggest search in reactive client.
* DATAES-865 - Fix MappingElasticsearchConverter writing an Object property containing a Map.
* DATAES-863 - Improve server error response handling.
* DATAES-859 - Don't use randomNumeric() in tests.
* DATAES-858 - Use standard Spring code of conduct.
* DATAES-857 - Registered simple types are not read from list.
* DATAES-853 - Cleanup tests that do not delete test indices.
* DATAES-852 - Upgrade to Elasticsearch 7.7.1.
* DATAES-850 - Add warning and documentation for missing TemporalAccessor configuration.
* DATAES-848 - Add the name of the index to SearchHit.
* DATAES-847 - Add missing DateFormat values.
* DATAES-845 - MappingElasticsearchConverter crashes when writing lists containing null values.
* DATAES-844 - Improve TOC formatting for migration guides.
* DATAES-841 - Remove deprecated type mappings code.
* DATAES-840 - Consolidate index name SpEL resolution.
* DATAES-839 - ReactiveElasticsearchTemplate should use RequestFactory.
* DATAES-838 - Update to Elasticsearch 7.7.0.
* DATAES-836 - Fix typo in Javadocs.
* DATAES-835 - Fix code sample in documentation for scroll API.
* DATAES-832 - findAllById repository method returns iterable with null elements for not found ids.
* DATAES-831 - SearchOperations.searchForStream does not use requested maxResults.
* DATAES-829 - Deprecate AbstractElasticsearchRepository and cleanup SimpleElasticsearchRepository.
* DATAES-828 - Fields of type date need to have a format defined.
* DATAES-827 - Repositories should not try to create an index when it already exists.
* DATAES-826 - Add method to IndexOperations to write an index mapping from a entity class.
* DATAES-825 - Update readme to use latest spring.io docs.
* DATAES-824 - Release 4.1 M1 (2020.0.0).
* DATAES-678 - Introduce ReactiveIndexOperations.
* DATAES-263 - Inner Hits support.
Changes in version 4.0.1.RELEASE (2020-06-10)
---------------------------------------------
* DATAES-857 - Registered simple types are not read from list.
* DATAES-850 - Add warning and documentation for missing TemporalAccessor configuration.
* DATAES-845 - MappingElasticsearchConverter crashes when writing lists containing null values.
* DATAES-844 - Improve TOC formatting for migration guides.
* DATAES-839 - ReactiveElasticsearchTemplate should use RequestFactory.
* DATAES-835 - Fix code sample in documentation for scroll API.
* DATAES-832 - findAllById repository method returns iterable with null elements for not found ids.
* DATAES-831 - SearchOperations.searchForStream does not use requested maxResults.
* DATAES-828 - Fields of type date need to have a format defined.
* DATAES-827 - Repositories should not try to create an index when it already exists.
* DATAES-823 - Release 4.0.1 (Neumann SR1).
Changes in version 3.2.8.RELEASE (2020-06-10)
---------------------------------------------
* DATAES-851 - Upgrade to Elasticsearch 6.8.10.
* DATAES-837 - Update to Elasticsearch 6.8.9.
* DATAES-821 - Fix code for adding an alias.
* DATAES-811 - Remove Travis CI.
* DATAES-807 - Release 3.2.8 (Moore SR8).
* DATAES-776 - Adapt RestClients class to change in InetSocketAddress class in JDK14.
* DATAES-767 - Fix ReactiveElasticsearch handling of 4xx HTTP responses.
Changes in version 3.1.18.RELEASE (2020-06-10)
----------------------------------------------
* DATAES-811 - Remove Travis CI.
* DATAES-806 - Release 3.1.18 (Lovelace SR18).
Changes in version 4.0.0.RELEASE (2020-05-12)
---------------------------------------------
* DATAES-822 - ElasticsearchRestTemplate should not use `spring-web`.
@ -1137,5 +1224,11 @@ Release Notes - Spring Data Elasticsearch - Version 1.0 M1 (2014-02-07)

View File

@ -1,4 +1,4 @@
Spring Data Elasticsearch 4.0 GA
Spring Data Elasticsearch 4.0.2 (Neumann SR2)
Copyright (c) [2013-2019] Pivotal Software, Inc.
This product is licensed to you under the Apache License, Version 2.0 (the "License").
@ -15,3 +15,5 @@ conditions of the subcomponent's license, as noted in the LICENSE file.

View File

@ -77,7 +77,7 @@ import org.springframework.data.elasticsearch.annotations.ScriptedField;
import org.springframework.data.elasticsearch.core.geo.GeoPoint;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.*;
import org.springframework.data.util.CloseableIterator;
import org.springframework.data.util.StreamUtils;
import org.springframework.lang.Nullable;
/**
@ -1298,27 +1298,33 @@ public abstract class ElasticsearchTemplateTests {
assertThat(sampleEntities).hasSize(30);
}
@Test // DATAES-167
public void shouldReturnResultsWithStreamForGivenCriteriaQuery() {
@Test // DATAES-167, DATAES-831
public void shouldReturnAllResultsWithStreamForGivenCriteriaQuery() {
// given
List<IndexQuery> entities = createSampleEntitiesWithMessage("Test message", 30);
// when
operations.bulkIndex(entities, index);
operations.bulkIndex(createSampleEntitiesWithMessage("Test message", 30), index);
indexOperations.refresh();
// then
CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria());
criteriaQuery.setPageable(PageRequest.of(0, 10));
CloseableIterator<SearchHit<SampleEntity>> stream = operations.searchForStream(criteriaQuery, SampleEntity.class,
index);
List<SearchHit<SampleEntity>> sampleEntities = new ArrayList<>();
while (stream.hasNext()) {
sampleEntities.add(stream.next());
}
assertThat(sampleEntities).hasSize(30);
long count = StreamUtils
.createStreamFromIterator(operations.searchForStream(criteriaQuery, SampleEntity.class, index)).count();
assertThat(count).isEqualTo(30);
}
@Test // DATAES-831
void shouldLimitStreamResultToRequestedSize() {
operations.bulkIndex(createSampleEntitiesWithMessage("Test message", 30), index);
indexOperations.refresh();
CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria());
criteriaQuery.setMaxResults(10);
long count = StreamUtils
.createStreamFromIterator(operations.searchForStream(criteriaQuery, SampleEntity.class, index)).count();
assertThat(count).isEqualTo(10);
}
private static List<IndexQuery> createSampleEntitiesWithMessage(String message, int numberOfEntities) {
@ -3128,8 +3134,8 @@ public abstract class ElasticsearchTemplateTests {
operations.refresh(OptimisticEntity.class);
List<Query> queries = singletonList(queryForOne(saved.getId()));
List<SearchHits<OptimisticEntity>> retrievedHits = operations.multiSearch(queries,
OptimisticEntity.class, operations.getIndexCoordinatesFor(OptimisticEntity.class));
List<SearchHits<OptimisticEntity>> retrievedHits = operations.multiSearch(queries, OptimisticEntity.class,
operations.getIndexCoordinatesFor(OptimisticEntity.class));
OptimisticEntity retrieved = retrievedHits.get(0).getSearchHit(0).getContent();
assertThatSeqNoPrimaryTermIsFilled(retrieved);
@ -3162,8 +3168,7 @@ public abstract class ElasticsearchTemplateTests {
operations.save(forEdit1);
forEdit2.setMessage("It'll be great");
assertThatThrownBy(() -> operations.save(forEdit2))
.isInstanceOf(OptimisticLockingFailureException.class);
assertThatThrownBy(() -> operations.save(forEdit2)).isInstanceOf(OptimisticLockingFailureException.class);
}
@Test // DATAES-799
@ -3179,8 +3184,7 @@ public abstract class ElasticsearchTemplateTests {
operations.save(forEdit1);
forEdit2.setMessage("It'll be great");
assertThatThrownBy(() -> operations.save(forEdit2))
.isInstanceOf(OptimisticLockingFailureException.class);
assertThatThrownBy(() -> operations.save(forEdit2)).isInstanceOf(OptimisticLockingFailureException.class);
}
@Test // DATAES-799

View File

@ -34,6 +34,7 @@ import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.dao.DataAccessException;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.DateFormat;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
@ -146,7 +147,7 @@ public class LogEntityTests {
@Field(type = Ip) private String ip;
@Field(type = Date) private java.util.Date date;
@Field(type = Date, format = DateFormat.date_time) private java.util.Date date;
private LogEntity() {}

View File

@ -25,6 +25,7 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import org.junit.jupiter.api.Test;
import org.springframework.data.util.StreamUtils;
/**
* @author Sascha Woo
@ -45,6 +46,7 @@ public class StreamQueriesTest {
// when
SearchHitsIterator<String> iterator = StreamQueries.streamResults( //
0, //
searchHits, //
scrollId -> newSearchScrollHits(Collections.emptyList(), scrollId), //
scrollIds -> clearScrollCalled.set(true));
@ -70,6 +72,7 @@ public class StreamQueriesTest {
// when
SearchHitsIterator<String> iterator = StreamQueries.streamResults( //
0, //
searchHits, //
scrollId -> newSearchScrollHits(Collections.emptyList(), scrollId), //
scrollId -> {});
@ -90,10 +93,12 @@ public class StreamQueriesTest {
Collections.singletonList(new SearchHit<String>(null, 0, null, null, "one")), "s-2");
SearchScrollHits<String> searchHits4 = newSearchScrollHits(Collections.emptyList(), "s-3");
Iterator<SearchScrollHits<String>> searchScrollHitsIterator = Arrays.asList(searchHits1, searchHits2, searchHits3,searchHits4).iterator();
Iterator<SearchScrollHits<String>> searchScrollHitsIterator = Arrays
.asList(searchHits1, searchHits2, searchHits3, searchHits4).iterator();
List<String> clearedScrollIds = new ArrayList<>();
SearchHitsIterator<String> iterator = StreamQueries.streamResults( //
0, //
searchScrollHitsIterator.next(), //
scrollId -> searchScrollHitsIterator.next(), //
scrollIds -> clearedScrollIds.addAll(scrollIds));
@ -106,6 +111,56 @@ public class StreamQueriesTest {
assertThat(clearedScrollIds).isEqualTo(Arrays.asList("s-1", "s-2", "s-3"));
}
@Test // DATAES-831
void shouldReturnAllForRequestedSizeOf0() {
SearchScrollHits<String> searchHits1 = newSearchScrollHits(
Collections.singletonList(new SearchHit<String>(null, 0, null, null, "one")), "s-1");
SearchScrollHits<String> searchHits2 = newSearchScrollHits(
Collections.singletonList(new SearchHit<String>(null, 0, null, null, "one")), "s-2");
SearchScrollHits<String> searchHits3 = newSearchScrollHits(
Collections.singletonList(new SearchHit<String>(null, 0, null, null, "one")), "s-2");
SearchScrollHits<String> searchHits4 = newSearchScrollHits(Collections.emptyList(), "s-3");
Iterator<SearchScrollHits<String>> searchScrollHitsIterator = Arrays
.asList(searchHits1, searchHits2, searchHits3, searchHits4).iterator();
SearchHitsIterator<String> iterator = StreamQueries.streamResults( //
0, //
searchScrollHitsIterator.next(), //
scrollId -> searchScrollHitsIterator.next(), //
scrollIds -> {});
long count = StreamUtils.createStreamFromIterator(iterator).count();
assertThat(count).isEqualTo(3);
}
@Test // DATAES-831
void shouldOnlyReturnRequestedCount() {
SearchScrollHits<String> searchHits1 = newSearchScrollHits(
Collections.singletonList(new SearchHit<String>(null, 0, null, null, "one")), "s-1");
SearchScrollHits<String> searchHits2 = newSearchScrollHits(
Collections.singletonList(new SearchHit<String>(null, 0, null, null, "one")), "s-2");
SearchScrollHits<String> searchHits3 = newSearchScrollHits(
Collections.singletonList(new SearchHit<String>(null, 0, null, null, "one")), "s-2");
SearchScrollHits<String> searchHits4 = newSearchScrollHits(Collections.emptyList(), "s-3");
Iterator<SearchScrollHits<String>> searchScrollHitsIterator = Arrays
.asList(searchHits1, searchHits2, searchHits3, searchHits4).iterator();
SearchHitsIterator<String> iterator = StreamQueries.streamResults( //
2, //
searchScrollHitsIterator.next(), //
scrollId -> searchScrollHitsIterator.next(), //
scrollIds -> {});
long count = StreamUtils.createStreamFromIterator(iterator).count();
assertThat(count).isEqualTo(2);
}
private SearchScrollHits<String> newSearchScrollHits(List<SearchHit<String>> hits, String scrollId) {
return new SearchHitsImpl<String>(hits.size(), TotalHitsRelation.EQUAL_TO, 0, scrollId, hits, null);
}

View File

@ -718,6 +718,101 @@ public class MappingElasticsearchConverterUnitTests {
assertThat(entity.seqNoPrimaryTerm).isNull();
}
@Test // DATAES-845
void shouldWriteCollectionsWithNullValues() throws JSONException {
EntityWithListProperty entity = new EntityWithListProperty();
entity.setId("42");
entity.setValues(Arrays.asList(null, "two", null, "four"));
String expected = '{' + //
" \"id\": \"42\"," + //
" \"values\": [null, \"two\", null, \"four\"]" + //
'}';
Document document = Document.create();
mappingElasticsearchConverter.write(entity, document);
String json = document.toJson();
assertEquals(expected, json, false);
}
@Test // DATAES-857
void shouldWriteEntityWithListOfGeoPoints() throws JSONException {
GeoPointListEntity entity = new GeoPointListEntity();
entity.setId("42");
List<GeoPoint> locations = Arrays.asList(new GeoPoint(12.34, 23.45), new GeoPoint(34.56, 45.67));
entity.setLocations(locations);
String expected = "{\n" + //
" \"id\": \"42\",\n" + //
" \"locations\": [\n" + //
" {\n" + //
" \"lat\": 12.34,\n" + //
" \"lon\": 23.45\n" + //
" },\n" + //
" {\n" + //
" \"lat\": 34.56,\n" + //
" \"lon\": 45.67\n" + //
" }\n" + //
" ]\n" + //
"}"; //
Document document = Document.create();
mappingElasticsearchConverter.write(entity, document);
String json = document.toJson();
assertEquals(expected, json, false);
}
@Test // DATAES-857
void shouldReadEntityWithListOfGeoPoints() {
String json = "{\n" + //
" \"id\": \"42\",\n" + //
" \"locations\": [\n" + //
" {\n" + //
" \"lat\": 12.34,\n" + //
" \"lon\": 23.45\n" + //
" },\n" + //
" {\n" + //
" \"lat\": 34.56,\n" + //
" \"lon\": 45.67\n" + //
" }\n" + //
" ]\n" + //
"}"; //
Document document = Document.parse(json);
GeoPointListEntity entity = mappingElasticsearchConverter.read(GeoPointListEntity.class, document);
assertThat(entity.id).isEqualTo("42");
assertThat(entity.locations).containsExactly(new GeoPoint(12.34, 23.45), new GeoPoint(34.56, 45.67));
}
@Test // DATAES-865
void shouldWriteEntityWithMapAsObject() throws JSONException {
Map<String, Object> map = new LinkedHashMap<>();
map.put("foo", "bar");
EntityWithObject entity = new EntityWithObject();
entity.setId("42");
entity.setContent(map);
String expected = "{\n" + //
" \"id\": \"42\",\n" + //
" \"content\": {\n" + //
" \"foo\": \"bar\"\n" + //
" }\n" + //
"}\n"; //
Document document = Document.create();
mappingElasticsearchConverter.write(entity, document);
assertEquals(expected, document.toJson(), false);
}
private String pointTemplate(String name, Point point) {
return String.format(Locale.ENGLISH, "\"%s\":{\"lat\":%.1f,\"lon\":%.1f}", name, point.getX(), point.getY());
}
@ -932,4 +1027,23 @@ public class MappingElasticsearchConverterUnitTests {
@Nullable private SeqNoPrimaryTerm seqNoPrimaryTerm;
}
@Data
static class EntityWithListProperty {
@Id private String id;
private List<String> values;
}
@Data
static class GeoPointListEntity {
@Id String id;
List<GeoPoint> locations;
}
@Data
static class EntityWithObject {
@Id private String id;
private Object content;
}
}

View File

@ -859,7 +859,7 @@ public class MappingBuilderTests extends MappingContextBaseTests {
@Nullable @Id private String id;
@Nullable @Field(type = FieldType.Date, index = false) private Date createdDate;
@Nullable @Field(type = FieldType.Date, format = DateFormat.date_time, index = false) private Date createdDate;
@Nullable
public String getId() {

View File

@ -41,10 +41,10 @@ public class SimpleElasticsearchDateMappingTests extends MappingContextBaseTests
private static final String EXPECTED_MAPPING = "{\"properties\":{\"message\":{\"store\":true,"
+ "\"type\":\"text\",\"index\":false,\"analyzer\":\"standard\"},\"customFormatDate\":{\"type\":\"date\",\"format\":\"dd.MM.uuuu hh:mm\"},"
+ "\"defaultFormatDate\":{\"type\":\"date\"},\"basicFormatDate\":{\""
+ "\"basicFormatDate\":{\""
+ "type\":\"date\",\"format\":\"basic_date\"}}}";
@Test // DATAES-568
@Test // DATAES-568, DATAES-828
public void testCorrectDateMappings() {
String mapping = getMappingBuilder().buildPropertyMapping(SampleDateMappingEntity.class);
@ -67,8 +67,6 @@ public class SimpleElasticsearchDateMappingTests extends MappingContextBaseTests
@Field(type = Date, format = DateFormat.custom,
pattern = "dd.MM.uuuu hh:mm") private LocalDateTime customFormatDate;
@Field(type = FieldType.Date) private LocalDateTime defaultFormatDate;
@Field(type = FieldType.Date, format = DateFormat.basic_date) private LocalDateTime basicFormatDate;
}
}

View File

@ -173,6 +173,20 @@ public class SimpleElasticsearchPersistentPropertyUnitTests {
assertThat(seqNoProperty.isReadable()).isFalse();
}
@Test // DATAES-828
void shouldRequireFormatForDateField() {
assertThatExceptionOfType(MappingException.class) //
.isThrownBy(() -> context.getRequiredPersistentEntity(DateFieldWithNoFormat.class)) //
.withMessageContaining("date");
}
@Test // DATAES-828
void shouldRequireFormatForDateNanosField() {
assertThatExceptionOfType(MappingException.class) //
.isThrownBy(() -> context.getRequiredPersistentEntity(DateNanosFieldWithNoFormat.class)) //
.withMessageContaining("date");
}
static class InvalidScoreProperty {
@Nullable @Score String scoreProperty;
}
@ -195,4 +209,12 @@ public class SimpleElasticsearchPersistentPropertyUnitTests {
SeqNoPrimaryTerm seqNoPrimaryTerm;
String string;
}
static class DateFieldWithNoFormat {
@Field(type = FieldType.Date) LocalDateTime datetime;
}
static class DateNanosFieldWithNoFormat {
@Field(type = FieldType.Date_Nanos) LocalDateTime datetime;
}
}

View File

@ -25,12 +25,14 @@ import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.IOException;
import java.lang.Long;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
@ -56,6 +58,7 @@ import org.springframework.data.elasticsearch.junit.jupiter.SpringIntegrationTes
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
import org.springframework.data.elasticsearch.repository.config.EnableElasticsearchRepositories;
import org.springframework.data.elasticsearch.utils.IndexInitializer;
import org.springframework.data.util.StreamUtils;
import org.springframework.test.context.ContextConfiguration;
/**
@ -361,6 +364,14 @@ public class SimpleElasticsearchRepositoryTests {
@Test
public void shouldDeleteAll() {
// given
String documentId = randomNumeric(5);
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(documentId);
sampleEntity.setMessage("hello world.");
sampleEntity.setVersion(System.currentTimeMillis());
repository.save(sampleEntity);
// when
repository.deleteAll();
@ -677,6 +688,32 @@ public class SimpleElasticsearchRepositoryTests {
assertThat(savedEntities).hasSize(0);
}
@Test // DATAES-832
void shouldNotReturnNullValuesInFindAllById() {
// given
String documentId1 = "id-one";
SampleEntity sampleEntity1 = new SampleEntity();
sampleEntity1.setId(documentId1);
repository.save(sampleEntity1);
String documentId2 = "id-two";
SampleEntity sampleEntity2 = new SampleEntity();
sampleEntity2.setId(documentId2);
repository.save(sampleEntity2);
String documentId3 = "id-three";
SampleEntity sampleEntity3 = new SampleEntity();
sampleEntity3.setId(documentId3);
repository.save(sampleEntity3);
Iterable<SampleEntity> allById = repository
.findAllById(Arrays.asList("id-one", "does-not-exist", "id-two", "where-am-i", "id-three"));
List<SampleEntity> results = StreamUtils.createStreamFromIterator(allById.iterator()).collect(Collectors.toList());
assertThat(results).hasSize(3);
assertThat(results.stream().map(SampleEntity::getId).collect(Collectors.toList()))
.containsExactlyInAnyOrder("id-one", "id-two", "id-three");
}
private static List<SampleEntity> createSampleEntitiesWithMessage(String message, int numberOfEntities) {
List<SampleEntity> sampleEntities = new ArrayList<>();