Mirror of https://github.com/spring-projects/spring-data-elasticsearch.git, synced 2025-06-12 07:02:10 +00:00
Switch reactive unpaged search from scroll to pit with search_after.
Original Pull Request #2393
Closes #1685
This commit is contained in:
parent 014aa3dbf6
commit e1c8a2adeb
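With this change, a reactive search whose query is neither paged nor limited no longer iterates a scroll cursor; it opens a point in time (pit) and pages through it with search_after. A condensed, illustrative sketch of that loop follows. openPit, searchPage, emit and closePit are hypothetical stand-ins for the client calls the template actually makes (openPointInTime, search, closePointInTime), and the types are simplified.

// Hypothetical sketch of the pit + search_after loop; helper names are placeholders.
String pit = openPit(index, Duration.ofMinutes(5));        // open a point in time for a stable snapshot
List<Object> searchAfter = null;                           // no cursor before the first page
try {
    while (true) {
        Page page = searchPage(query, pit, searchAfter);   // search against the pit, sorted by _shard_doc
        if (page.hits().isEmpty()) {
            break;                                         // an empty page ends the iteration
        }
        emit(page.hits());                                 // hand the batch to the downstream consumer
        searchAfter = page.lastSortValues();               // the last hit's sort values start the next page
    }
} finally {
    closePit(pit);                                         // always release the pit, also on error or cancel
}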
@@ -306,7 +306,7 @@ public class ElasticsearchTemplate extends AbstractElasticsearchTemplate {
Assert.notNull(query, "query must not be null");
Assert.notNull(index, "index must not be null");
SearchRequest searchRequest = requestConverter.searchRequest(query, clazz, index, true, false);
SearchRequest searchRequest = requestConverter.searchRequest(query, clazz, index, true);
SearchResponse<EntityAsMap> searchResponse = execute(client -> client.search(searchRequest, EntityAsMap.class));

@@ -319,7 +319,7 @@ public class ElasticsearchTemplate extends AbstractElasticsearchTemplate {
Assert.notNull(query, "query must not be null");
Assert.notNull(index, "index must not be null");
SearchRequest searchRequest = requestConverter.searchRequest(query, clazz, index, false, false);
SearchRequest searchRequest = requestConverter.searchRequest(query, clazz, index, false);
SearchResponse<EntityAsMap> searchResponse = execute(client -> client.search(searchRequest, EntityAsMap.class));
ReadDocumentCallback<T> readDocumentCallback = new ReadDocumentCallback<>(elasticsearchConverter, clazz, index);
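Both hunks above make the same change: the call sites in the imperative ElasticsearchTemplate stop passing the scroll flag and use the shorter overload. Side by side, taken from the hunks and repeated here only for readability:

// before: the trailing boolean chose scroll behaviour at every call site
SearchRequest searchRequest = requestConverter.searchRequest(query, clazz, index, true, false);
// after: call sites only say whether this is a count request; batched-search handling
// is decided inside RequestConverter (see the hunks further down)
SearchRequest searchRequest = requestConverter.searchRequest(query, clazz, index, true);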
@@ -19,7 +19,6 @@ import static co.elastic.clients.util.ApiTypeHelper.*;
import static org.springframework.data.elasticsearch.client.elc.TypeUtils.*;
import co.elastic.clients.elasticsearch._types.Result;
import co.elastic.clients.elasticsearch._types.Time;
import co.elastic.clients.elasticsearch.core.*;
import co.elastic.clients.elasticsearch.core.bulk.BulkResponseItem;
import co.elastic.clients.elasticsearch.core.get.GetResult;

@@ -35,14 +34,19 @@ import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.reactivestreams.Publisher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Sort;
import org.springframework.data.elasticsearch.BulkFailureException;
import org.springframework.data.elasticsearch.NoSuchIndexException;
import org.springframework.data.elasticsearch.UncategorizedElasticsearchException;
import org.springframework.data.elasticsearch.client.UnsupportedBackendOperation;
import org.springframework.data.elasticsearch.client.erhlc.ReactiveClusterOperations;
import org.springframework.data.elasticsearch.client.util.ScrollState;
import org.springframework.data.elasticsearch.core.AbstractReactiveElasticsearchTemplate;
import org.springframework.data.elasticsearch.core.AggregationContainer;
import org.springframework.data.elasticsearch.core.IndexedObjectInformation;

@@ -54,6 +58,7 @@ import org.springframework.data.elasticsearch.core.document.Document;
import org.springframework.data.elasticsearch.core.document.SearchDocument;
import org.springframework.data.elasticsearch.core.document.SearchDocumentResponse;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.BaseQuery;
import org.springframework.data.elasticsearch.core.query.BulkOptions;
import org.springframework.data.elasticsearch.core.query.ByQueryResponse;
import org.springframework.data.elasticsearch.core.query.Query;

@@ -64,6 +69,7 @@ import org.springframework.data.elasticsearch.core.reindex.ReindexResponse;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
/**
* Implementation of {@link org.springframework.data.elasticsearch.core.ReactiveElasticsearchOperations} using the new

@@ -74,6 +80,8 @@ import org.springframework.util.CollectionUtils;
*/
public class ReactiveElasticsearchTemplate extends AbstractReactiveElasticsearchTemplate {
private static final Logger LOGGER = LoggerFactory.getLogger(ReactiveElasticsearchTemplate.class);
private final ReactiveElasticsearchClient client;
private final RequestConverter requestConverter;
private final ResponseConverter responseConverter;

@@ -136,6 +144,32 @@ public class ReactiveElasticsearchTemplate extends AbstractReactiveElasticsearch
});
}
@Override
protected Mono<Boolean> doExists(String id, IndexCoordinates index) {
Assert.notNull(id, "id must not be null");
Assert.notNull(index, "index must not be null");
GetRequest getRequest = requestConverter.documentGetRequest(id, routingResolver.getRouting(), index, true);
return Mono.from(execute(
((ClientCallback<Publisher<GetResponse<EntityAsMap>>>) client -> client.get(getRequest, EntityAsMap.class))))
.map(GetResult::found) //
.onErrorReturn(NoSuchIndexException.class, false);
}
@Override
public Mono<ByQueryResponse> delete(Query query, Class<?> entityType, IndexCoordinates index) {
Assert.notNull(query, "query must not be null");
DeleteByQueryRequest request = requestConverter.documentDeleteByQueryRequest(query, entityType, index,
getRefreshPolicy());
return Mono
.from(execute((ClientCallback<Publisher<DeleteByQueryResponse>>) client -> client.deleteByQuery(request)))
.map(responseConverter::byQueryResponse);
}
@Override
public <T> Mono<T> get(String id, Class<T> entityType, IndexCoordinates index) {
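doExists and delete(Query, ...) appear to be moved up unchanged into this part of the class; they show up again in the large hunk below, where their old location is touched. For orientation, a hedged caller-side sketch against the reactive operations, with SampleEntity, the id, the index name and the query as placeholder values:

// exists(...) maps the GetResponse to its "found" flag and treats a missing index as false
Mono<Boolean> exists = operations.exists("42", SampleEntity.class);

// delete(Query, ...) issues a delete-by-query and converts the result to ByQueryResponse;
// 'query' stands for any Query built elsewhere
Mono<ByQueryResponse> deleted = operations.delete(query, SampleEntity.class, IndexCoordinates.of("sample-index"));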
@@ -183,6 +217,29 @@ public class ReactiveElasticsearchTemplate extends AbstractReactiveElasticsearch
: Mono.just(response.task()));
}
@Override
public Mono<UpdateResponse> update(UpdateQuery updateQuery, IndexCoordinates index) {
Assert.notNull(updateQuery, "UpdateQuery must not be null");
Assert.notNull(index, "Index must not be null");
UpdateRequest<Document, ?> request = requestConverter.documentUpdateRequest(updateQuery, index, getRefreshPolicy(),
routingResolver.getRouting());
return Mono.from(execute(
(ClientCallback<Publisher<co.elastic.clients.elasticsearch.core.UpdateResponse<Document>>>) client -> client
.update(request, Document.class)))
.flatMap(response -> {
UpdateResponse.Result result = result(response.result());
return result == null ? Mono.empty() : Mono.just(UpdateResponse.of(result));
});
}
@Override
public Mono<ByQueryResponse> updateByQuery(UpdateQuery updateQuery, IndexCoordinates index) {
throw new UnsupportedOperationException("not implemented");
}
@Override
public Mono<Void> bulkUpdate(List<UpdateQuery> queries, BulkOptions bulkOptions, IndexCoordinates index) {
@@ -279,87 +336,108 @@ public class ReactiveElasticsearchTemplate extends AbstractReactiveElasticsearch
return new ReactiveElasticsearchTemplate(client, converter);
}
@Override
protected Mono<Boolean> doExists(String id, IndexCoordinates index) {
Assert.notNull(id, "id must not be null");
Assert.notNull(index, "index must not be null");
GetRequest getRequest = requestConverter.documentGetRequest(id, routingResolver.getRouting(), index, true);
return Mono.from(execute(
((ClientCallback<Publisher<GetResponse<EntityAsMap>>>) client -> client.get(getRequest, EntityAsMap.class))))
.map(GetResult::found) //
.onErrorReturn(NoSuchIndexException.class, false);
}
@Override
public Mono<ByQueryResponse> delete(Query query, Class<?> entityType, IndexCoordinates index) {
Assert.notNull(query, "query must not be null");
DeleteByQueryRequest request = requestConverter.documentDeleteByQueryRequest(query, entityType, index,
getRefreshPolicy());
return Mono
.from(execute((ClientCallback<Publisher<DeleteByQueryResponse>>) client -> client.deleteByQuery(request)))
.map(responseConverter::byQueryResponse);
}
// region search operations
@Override
protected Flux<SearchDocument> doFind(Query query, Class<?> clazz, IndexCoordinates index) {
return Flux.defer(() -> {
boolean useScroll = !(query.getPageable().isPaged() || query.isLimiting());
SearchRequest searchRequest = requestConverter.searchRequest(query, clazz, index, false, useScroll);
boolean queryIsUnbounded = !(query.getPageable().isPaged() || query.isLimiting());
if (useScroll) {
return doScroll(searchRequest);
} else {
return doFind(searchRequest);
}
return queryIsUnbounded ? doFindUnbounded(query, clazz, index) : doFindBounded(query, clazz, index);
});
}
private Flux<SearchDocument> doScroll(SearchRequest searchRequest) {
private Flux<SearchDocument> doFindUnbounded(Query query, Class<?> clazz, IndexCoordinates index) {
Time scrollTimeout = searchRequest.scroll() != null ? searchRequest.scroll() : Time.of(t -> t.time("1m"));
if (query instanceof BaseQuery baseQuery) {
var pitKeepAlive = Duration.ofMinutes(5);
// setup functions for Flux.usingWhen()
Mono<PitSearchAfter> resourceSupplier = openPointInTime(index, pitKeepAlive, true)
.map(pit -> new PitSearchAfter(baseQuery, pit));
Flux<ResponseBody<EntityAsMap>> searchResponses = Flux.usingWhen(Mono.fromSupplier(ScrollState::new), //
state -> Mono
.from(execute((ClientCallback<Publisher<ResponseBody<EntityAsMap>>>) client -> client.search(searchRequest,
EntityAsMap.class))) //
.expand(entityAsMapSearchResponse -> {
Function<PitSearchAfter, Publisher<?>> asyncComplete = this::cleanupPit;
state.updateScrollId(entityAsMapSearchResponse.scrollId());
BiFunction<PitSearchAfter, Throwable, Publisher<?>> asyncError = (psa, ex) -> {
if (LOGGER.isErrorEnabled()) {
LOGGER.error(String.format("Error during pit/search_after"), ex);
}
return cleanupPit(psa);
};
if (entityAsMapSearchResponse.hits() == null
|| CollectionUtils.isEmpty(entityAsMapSearchResponse.hits().hits())) {
Function<PitSearchAfter, Publisher<?>> asyncCancel = psa -> {
if (LOGGER.isWarnEnabled()) {
LOGGER.warn(String.format("pit/search_after was cancelled"));
}
return cleanupPit(psa);
};
Function<PitSearchAfter, Publisher<? extends ResponseBody<EntityAsMap>>> resourceClosure = psa -> {
baseQuery.setPointInTime(new Query.PointInTime(psa.getPit(), pitKeepAlive));
baseQuery.addSort(Sort.by("_shard_doc"));
SearchRequest firstSearchRequest = requestConverter.searchRequest(baseQuery, clazz, index, false, true);
return Mono.from(execute((ClientCallback<Publisher<ResponseBody<EntityAsMap>>>) client -> client
.search(firstSearchRequest, EntityAsMap.class))).expand(entityAsMapSearchResponse -> {
var hits = entityAsMapSearchResponse.hits().hits();
if (CollectionUtils.isEmpty(hits)) {
return Mono.empty();
}
return Mono.from(execute((ClientCallback<Publisher<ScrollResponse<EntityAsMap>>>) client1 -> {
ScrollRequest scrollRequest = ScrollRequest
.of(sr -> sr.scrollId(state.getScrollId()).scroll(scrollTimeout));
return client1.scroll(scrollRequest, EntityAsMap.class);
}));
}),
this::cleanupScroll, (state, ex) -> cleanupScroll(state), this::cleanupScroll);
List<Object> sortOptions = hits.get(hits.size() - 1).sort().stream().map(TypeUtils::toObject)
.collect(Collectors.toList());
baseQuery.setSearchAfter(sortOptions);
SearchRequest followSearchRequest = requestConverter.searchRequest(baseQuery, clazz, index, false, true);
return Mono.from(execute((ClientCallback<Publisher<ResponseBody<EntityAsMap>>>) client -> client
.search(followSearchRequest, EntityAsMap.class)));
});
return searchResponses.flatMapIterable(entityAsMapSearchResponse -> entityAsMapSearchResponse.hits().hits())
.map(entityAsMapHit -> DocumentAdapters.from(entityAsMapHit, jsonpMapper));
};
Flux<ResponseBody<EntityAsMap>> searchResponses = Flux.usingWhen(resourceSupplier, resourceClosure, asyncComplete,
asyncError, asyncCancel);
return searchResponses.flatMapIterable(entityAsMapSearchResponse -> entityAsMapSearchResponse.hits().hits())
.map(entityAsMapHit -> DocumentAdapters.from(entityAsMapHit, jsonpMapper));
} else {
return Flux.error(new IllegalArgumentException("Query must be derived from BaseQuery"));
}
}
private Publisher<?> cleanupScroll(ScrollState state) {
private Publisher<?> cleanupPit(PitSearchAfter psa) {
var baseQuery = psa.getBaseQuery();
baseQuery.setPointInTime(null);
baseQuery.setSearchAfter(null);
baseQuery.setSort(psa.getSort());
var pit = psa.getPit();
return StringUtils.hasText(pit) ? closePointInTime(pit) : Mono.empty();
}
if (state.getScrollIds().isEmpty()) {
return Mono.empty();
static private class PitSearchAfter {
private final BaseQuery baseQuery;
@Nullable private final Sort sort;
private final String pit;
PitSearchAfter(BaseQuery baseQuery, String pit) {
this.baseQuery = baseQuery;
this.sort = baseQuery.getSort();
this.pit = pit;
}
return execute((ClientCallback<Publisher<ClearScrollResponse>>) client -> client
.clearScroll(ClearScrollRequest.of(csr -> csr.scrollId(state.getScrollIds()))));
public BaseQuery getBaseQuery() {
return baseQuery;
}
@Nullable
public Sort getSort() {
return sort;
}
public String getPit() {
return pit;
}
}
@Override
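The heart of the new doFindUnbounded is Flux.usingWhen: the pit (wrapped in PitSearchAfter together with the query's original sort) is the resource, the resource closure runs the first search and then expands it page by page via search_after, and cleanupPit is registered for the completion, error and cancellation paths so the pit is always closed and the query object is restored. A minimal, self-contained Reactor example of that operator, with a plain String standing in for the pit holder and a small counter stream standing in for the search pages:

import java.time.Duration;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

public class UsingWhenExample {

	public static void main(String[] args) {

		// the "resource": here just an id, in the template a PitSearchAfter holding the pit id
		Mono<String> resourceSupplier = Mono.fromSupplier(() -> "pit-id-42");

		Flux<Integer> results = Flux.usingWhen( //
				resourceSupplier, //
				pit -> Flux.range(1, 5).delayElements(Duration.ofMillis(10)), // resource closure: the "pages"
				pit -> Mono.fromRunnable(() -> System.out.println("complete, closing " + pit)), // asyncComplete
				(pit, ex) -> Mono.fromRunnable(() -> System.out.println("error, closing " + pit)), // asyncError
				pit -> Mono.fromRunnable(() -> System.out.println("cancelled, closing " + pit))); // asyncCancel

		results.blockLast();
	}
}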
@@ -368,7 +446,7 @@ public class ReactiveElasticsearchTemplate extends AbstractReactiveElasticsearch
Assert.notNull(query, "query must not be null");
Assert.notNull(index, "index must not be null");
SearchRequest searchRequest = requestConverter.searchRequest(query, entityType, index, true, false);
SearchRequest searchRequest = requestConverter.searchRequest(query, entityType, index, true);
return Mono
.from(execute((ClientCallback<Publisher<ResponseBody<EntityAsMap>>>) client -> client.search(searchRequest,

@@ -376,7 +454,9 @@ public class ReactiveElasticsearchTemplate extends AbstractReactiveElasticsearch
.map(searchResponse -> searchResponse.hits().total() != null ? searchResponse.hits().total().value() : 0L);
}
private Flux<SearchDocument> doFind(SearchRequest searchRequest) {
private Flux<SearchDocument> doFindBounded(Query query, Class<?> clazz, IndexCoordinates index) {
SearchRequest searchRequest = requestConverter.searchRequest(query, clazz, index, false, false);
return Mono
.from(execute((ClientCallback<Publisher<ResponseBody<EntityAsMap>>>) client -> client.search(searchRequest,

@@ -391,7 +471,7 @@ public class ReactiveElasticsearchTemplate extends AbstractReactiveElasticsearch
Assert.notNull(query, "query must not be null");
Assert.notNull(index, "index must not be null");
SearchRequest searchRequest = requestConverter.searchRequest(query, clazz, index, false, false);
SearchRequest searchRequest = requestConverter.searchRequest(query, clazz, index, false);
// noinspection unchecked
SearchDocumentCallback<T> callback = new ReadSearchDocumentCallback<>((Class<T>) clazz, index);

@@ -458,29 +538,6 @@ public class ReactiveElasticsearchTemplate extends AbstractReactiveElasticsearch
})).map(infoResponse -> infoResponse.version().number());
}
@Override
public Mono<UpdateResponse> update(UpdateQuery updateQuery, IndexCoordinates index) {
Assert.notNull(updateQuery, "UpdateQuery must not be null");
Assert.notNull(index, "Index must not be null");
UpdateRequest<Document, ?> request = requestConverter.documentUpdateRequest(updateQuery, index, getRefreshPolicy(),
routingResolver.getRouting());
return Mono.from(execute(
(ClientCallback<Publisher<co.elastic.clients.elasticsearch.core.UpdateResponse<Document>>>) client -> client
.update(request, Document.class)))
.flatMap(response -> {
UpdateResponse.Result result = result(response.result());
return result == null ? Mono.empty() : Mono.just(UpdateResponse.of(result));
});
}
@Override
public Mono<ByQueryResponse> updateByQuery(UpdateQuery updateQuery, IndexCoordinates index) {
throw new UnsupportedOperationException("not implemented");
}
@Override
@Deprecated
public <T> Publisher<T> execute(ReactiveElasticsearchOperations.ClientCallback<Publisher<T>> callback) {
@@ -15,12 +15,8 @@
*/
package org.springframework.data.elasticsearch.client.elc;
import static org.springframework.data.elasticsearch.client.elc.TypeUtils.searchType;
import static org.springframework.data.elasticsearch.client.elc.TypeUtils.slices;
import static org.springframework.data.elasticsearch.client.elc.TypeUtils.time;
import static org.springframework.data.elasticsearch.client.elc.TypeUtils.timeStringMs;
import static org.springframework.data.elasticsearch.client.elc.TypeUtils.toFloat;
import static org.springframework.util.CollectionUtils.isEmpty;
import static org.springframework.data.elasticsearch.client.elc.TypeUtils.*;
import static org.springframework.util.CollectionUtils.*;
import co.elastic.clients.elasticsearch._types.Conflicts;
import co.elastic.clients.elasticsearch._types.FieldValue;

@@ -37,18 +33,7 @@ import co.elastic.clients.elasticsearch._types.mapping.RuntimeFieldType;
import co.elastic.clients.elasticsearch._types.mapping.TypeMapping;
import co.elastic.clients.elasticsearch._types.query_dsl.Like;
import co.elastic.clients.elasticsearch.cluster.HealthRequest;
import co.elastic.clients.elasticsearch.core.BulkRequest;
import co.elastic.clients.elasticsearch.core.ClosePointInTimeRequest;
import co.elastic.clients.elasticsearch.core.DeleteByQueryRequest;
import co.elastic.clients.elasticsearch.core.DeleteRequest;
import co.elastic.clients.elasticsearch.core.GetRequest;
import co.elastic.clients.elasticsearch.core.IndexRequest;
import co.elastic.clients.elasticsearch.core.MgetRequest;
import co.elastic.clients.elasticsearch.core.MsearchRequest;
import co.elastic.clients.elasticsearch.core.OpenPointInTimeRequest;
import co.elastic.clients.elasticsearch.core.SearchRequest;
import co.elastic.clients.elasticsearch.core.UpdateByQueryRequest;
import co.elastic.clients.elasticsearch.core.UpdateRequest;
import co.elastic.clients.elasticsearch.core.*;
import co.elastic.clients.elasticsearch.core.bulk.BulkOperation;
import co.elastic.clients.elasticsearch.core.bulk.CreateOperation;
import co.elastic.clients.elasticsearch.core.bulk.IndexOperation;

@@ -58,17 +43,8 @@ import co.elastic.clients.elasticsearch.core.msearch.MultisearchBody;
import co.elastic.clients.elasticsearch.core.search.Highlight;
import co.elastic.clients.elasticsearch.core.search.Rescore;
import co.elastic.clients.elasticsearch.core.search.SourceConfig;
import co.elastic.clients.elasticsearch.indices.CreateIndexRequest;
import co.elastic.clients.elasticsearch.indices.DeleteIndexRequest;
import co.elastic.clients.elasticsearch.indices.*;
import co.elastic.clients.elasticsearch.indices.ExistsRequest;
import co.elastic.clients.elasticsearch.indices.GetAliasRequest;
import co.elastic.clients.elasticsearch.indices.GetIndexRequest;
import co.elastic.clients.elasticsearch.indices.GetIndicesSettingsRequest;
import co.elastic.clients.elasticsearch.indices.GetMappingRequest;
import co.elastic.clients.elasticsearch.indices.IndexSettings;
import co.elastic.clients.elasticsearch.indices.PutMappingRequest;
import co.elastic.clients.elasticsearch.indices.RefreshRequest;
import co.elastic.clients.elasticsearch.indices.UpdateAliasesRequest;
import co.elastic.clients.elasticsearch.indices.update_aliases.Action;
import co.elastic.clients.json.JsonData;
import co.elastic.clients.json.JsonpDeserializer;

@@ -106,19 +82,7 @@ import org.springframework.data.elasticsearch.core.index.PutTemplateRequest;
import org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersistentEntity;
import org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersistentProperty;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.BaseQuery;
import org.springframework.data.elasticsearch.core.query.BulkOptions;
import org.springframework.data.elasticsearch.core.query.CriteriaQuery;
import org.springframework.data.elasticsearch.core.query.GeoDistanceOrder;
import org.springframework.data.elasticsearch.core.query.IndexQuery;
import org.springframework.data.elasticsearch.core.query.MoreLikeThisQuery;
import org.springframework.data.elasticsearch.core.query.Order;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.data.elasticsearch.core.query.RescorerQuery;
import org.springframework.data.elasticsearch.core.query.ScriptData;
import org.springframework.data.elasticsearch.core.query.SourceFilter;
import org.springframework.data.elasticsearch.core.query.StringQuery;
import org.springframework.data.elasticsearch.core.query.UpdateQuery;
import org.springframework.data.elasticsearch.core.query.*;
import org.springframework.data.elasticsearch.core.reindex.ReindexRequest;
import org.springframework.data.elasticsearch.core.reindex.Remote;
import org.springframework.data.elasticsearch.support.DefaultStringObjectMap;

@@ -1030,18 +994,22 @@ class RequestConverter {
// region search
public <T> SearchRequest searchRequest(Query query, @Nullable Class<T> clazz, IndexCoordinates indexCoordinates,
boolean forCount, long scrollTimeInMillis) {
boolean forCount) {
return searchRequest(query, clazz, indexCoordinates, forCount, false, null);
}
public <T> SearchRequest searchRequest(Query query, @Nullable Class<T> clazz, IndexCoordinates indexCoordinates,
boolean forCount, long scrollTimeInMillis) {
return searchRequest(query, clazz, indexCoordinates, forCount, true, scrollTimeInMillis);
}
public <T> SearchRequest searchRequest(Query query, @Nullable Class<T> clazz, IndexCoordinates indexCoordinates,
boolean forCount, boolean useScroll) {
return searchRequest(query, clazz, indexCoordinates, forCount, useScroll, null);
boolean forCount, boolean forBatchedSearch) {
return searchRequest(query, clazz, indexCoordinates, forCount, forBatchedSearch, null);
}
public <T> SearchRequest searchRequest(Query query, @Nullable Class<T> clazz, IndexCoordinates indexCoordinates,
boolean forCount, boolean useScroll, @Nullable Long scrollTimeInMillis) {
boolean forCount, boolean forBatchedSearch, @Nullable Long scrollTimeInMillis) {
String[] indexNames = indexCoordinates.getIndexNames();
Assert.notNull(query, "query must not be null");
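After this hunk the overload set reads as follows: the four-argument variant means a plain (count or bounded) search, the variant taking a long keeps meaning an imperative scroll with an explicit keep-alive, the two-boolean variant now means (forCount, forBatchedSearch), and everything funnels into the full (forCount, forBatchedSearch, scrollTimeInMillis) method. Illustrative calls, not part of the commit, with query, clazz and index as placeholders:

SearchRequest countRequest = requestConverter.searchRequest(query, clazz, index, true);             // count request
SearchRequest scrollRequest = requestConverter.searchRequest(query, clazz, index, false, 60_000L);  // imperative scroll, 60 s keep-alive
SearchRequest batchedRequest = requestConverter.searchRequest(query, clazz, index, false, true);    // batched search (pit/search_after)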
@@ -1049,7 +1017,7 @@ class RequestConverter {
elasticsearchConverter.updateQuery(query, clazz);
SearchRequest.Builder builder = new SearchRequest.Builder();
prepareSearchRequest(query, clazz, indexCoordinates, builder, forCount, useScroll);
prepareSearchRequest(query, clazz, indexCoordinates, builder, forCount, forBatchedSearch);
if (scrollTimeInMillis != null) {
builder.scroll(t -> t.time(scrollTimeInMillis + "ms"));

@@ -1184,7 +1152,7 @@ class RequestConverter {
}
private <T> void prepareSearchRequest(Query query, @Nullable Class<T> clazz, IndexCoordinates indexCoordinates,
SearchRequest.Builder builder, boolean forCount, boolean useScroll) {
SearchRequest.Builder builder, boolean forCount, boolean forBatchedSearch) {
String[] indexNames = indexCoordinates.getIndexNames();

@@ -1307,11 +1275,9 @@ class RequestConverter {
builder.size(0) //
.trackTotalHits(th -> th.count(Integer.MAX_VALUE)) //
.source(SourceConfig.of(sc -> sc.fetch(false)));
} else if (useScroll) {
} else if (forBatchedSearch) {
// request_cache is not allowed on scroll requests.
builder.requestCache(null);
Duration scrollTimeout = query.getScrollTime() != null ? query.getScrollTime() : Duration.ofMinutes(1);
builder.scroll(time(scrollTimeout));
// limit the number of documents in a batch
builder.size(query.getReactiveBatchSize());
}
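For a batched search the converter now only disables the request cache and caps each page at the query's reactive batch size; the scroll keep-alive is applied separately, and only when a scrollTimeInMillis is actually passed in. On the query side the batch size comes from the builder, as the integration test further down also shows; a hedged example using that builder, with the criteria and the value of 500 chosen only for illustration:

// assuming the CriteriaQuery builder used elsewhere in this commit; 500 hits per pit/search_after page
var query = CriteriaQuery.builder(new Criteria("message").contains("entity")) //
		.withPageable(Pageable.unpaged()) //
		.withReactiveBatchSize(500) //
		.build();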
@@ -125,6 +125,36 @@ final class TypeUtils {
default -> throw new IllegalStateException("Unexpected value: " + fieldValue._kind());
}
}
@Nullable
static Object toObject(@Nullable FieldValue fieldValue) {
if (fieldValue == null) {
return null;
}
switch (fieldValue._kind()) {
case Double -> {
return Double.valueOf(fieldValue.doubleValue());
}
case Long -> {
return Long.valueOf(fieldValue.longValue());
}
case Boolean -> {
return Boolean.valueOf(fieldValue.booleanValue());
}
case String -> {
return fieldValue.stringValue();
}
case Null -> {
return null;
}
case Any -> {
return fieldValue.anyValue().toString();
}
default -> throw new IllegalStateException("Unexpected value: " + fieldValue._kind());
}
}
@Nullable
static GeoDistanceType geoDistanceType(GeoDistanceOrder.DistanceType distanceType) {
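The new toObject converts the FieldValue sort values returned with each hit back into plain Java objects so they can be passed to Query.setSearchAfter. This is exactly how doFindUnbounded uses it, repeated from the hunk above for reference:

// last hit of the current page -> search_after marker for the follow-up request
List<Object> sortOptions = hits.get(hits.size() - 1).sort().stream() //
		.map(TypeUtils::toObject) //
		.collect(Collectors.toList());
baseQuery.setSearchAfter(sortOptions);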
@@ -111,6 +111,13 @@ public class BaseQuery implements Query {
this.reactiveBatchSize = builder.getReactiveBatchSize();
}
/**
* @since 5.1
*/
public void setSort(@Nullable Sort sort) {
this.sort = sort;
}
@Override
@Nullable
public Sort getSort() {
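setSort is the counterpart to the pit handling above: doFindUnbounded appends a "_shard_doc" tiebreaker sort to the caller's query, and cleanupPit uses the new setter to put the originally captured sort back once the pit is closed. Repeated from that hunk:

// from cleanupPit(PitSearchAfter psa): reset the pit-related state on the shared query
baseQuery.setPointInTime(null);
baseQuery.setSearchAfter(null);
baseQuery.setSort(psa.getSort()); // restore the sort captured before "_shard_doc" was added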
@@ -40,7 +40,7 @@ public class ElasticsearchPartQueryELCIntegrationTests extends ElasticsearchPart
JacksonJsonpMapper jsonpMapper = new JacksonJsonpMapper();
RequestConverter requestConverter = new RequestConverter(operations.getElasticsearchConverter(), jsonpMapper);
SearchRequest request = requestConverter.searchRequest(query, clazz, IndexCoordinates.of("dummy"), false, false);
SearchRequest request = requestConverter.searchRequest(query, clazz, IndexCoordinates.of("dummy"), false);
return JsonUtils.toJson(request, jsonpMapper);
// return "{\"query\":" + JsonUtils.toJson(request.query(), jsonpMapper) + "}";
@@ -463,11 +463,14 @@ public abstract class ReactiveElasticsearchIntegrationTests {
index(IntStream.range(0, 100).mapToObj(it -> randomEntity("entity - " + it)).toArray(SampleEntity[]::new));
CriteriaQuery query = new CriteriaQuery(new Criteria("message").contains("entity")) //
.addSort(Sort.by("message"))//
.setPageable(Pageable.unpaged());
var query = CriteriaQuery.builder(new Criteria("message").contains("entity")) //
.withSort(Sort.by("message")) //
.withPageable(Pageable.unpaged()) //
.withReactiveBatchSize(20) //
.build();
operations.search(query, SampleEntity.class).as(StepVerifier::create) //
operations.search(query, SampleEntity.class) //
.as(StepVerifier::create) //
.expectNextCount(100) //
.verifyComplete();
}
@@ -18,6 +18,7 @@ package org.springframework.data.elasticsearch.junit.jupiter;
import static org.springframework.util.StringUtils.*;
import java.io.InputStream;
import java.time.Duration;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

@@ -131,7 +132,7 @@ public class ClusterConnection implements ExtensionContext.Store.CloseableResour
DockerImageName dockerImageName = getDockerImageName(testcontainersProperties);
ElasticsearchContainer elasticsearchContainer = new SpringDataElasticsearchContainer(dockerImageName)
.withEnv(testcontainersProperties);
.withEnv(testcontainersProperties).withStartupTimeout(Duration.ofMinutes(2));
elasticsearchContainer.start();
return ClusterConnectionInfo.builder() //
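The test-infrastructure hunk gives the Elasticsearch Testcontainers container an explicit two-minute startup timeout, which helps on slow CI machines. A hedged equivalent with the stock Testcontainers class, where dockerImageName and envProperties are placeholders for the values resolved by ClusterConnection:

// equivalent setup with the stock ElasticsearchContainer; two minutes gives slow environments room to start
ElasticsearchContainer container = new ElasticsearchContainer(dockerImageName) //
		.withEnv(envProperties) //
		.withStartupTimeout(Duration.ofMinutes(2));
container.start();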