Code cleanup.

Original Pull Request #2890
Closes #2888
This commit is contained in:
Peter-Josef Meisch 2024-04-09 23:24:30 +02:00 committed by GitHub
parent 0beca99912
commit 1d709f6c55
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
97 changed files with 567 additions and 492 deletions

View File

@ -15,9 +15,6 @@
*/
package org.springframework.data.elasticsearch.annotations;
import org.springframework.data.annotation.ReadOnlyProperty;
import org.springframework.data.annotation.Transient;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
@ -25,10 +22,10 @@ import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Annotation to mark a String property of an entity to be filled with the name of the index where the entity was
* stored after it is indexed into Elasticsearch. This can be used when the name of the index is dynamically created
* or when a document was indexed into a write alias.
*
* Annotation to mark a String property of an entity to be filled with the name of the index where the entity was stored
* after it is indexed into Elasticsearch. This can be used when the name of the index is dynamically created or when a
* document was indexed into a write alias.
* <p>
* This can not be used to specify the index where an entity should be written to.
*
* @author Peter-Josef Meisch

View File

@ -82,6 +82,6 @@ public @interface Mapping {
MappingAlias[] aliases() default {};
enum Detection {
DEFAULT, TRUE, FALSE;
DEFAULT, TRUE, FALSE
}
}

View File

@ -16,7 +16,6 @@
package org.springframework.data.elasticsearch.client.elc;
import co.elastic.clients.elasticsearch.ElasticsearchClient;
import co.elastic.clients.elasticsearch.cluster.ElasticsearchClusterClient;
import co.elastic.clients.transport.ElasticsearchTransport;
import org.elasticsearch.client.RestClient;
@ -40,9 +39,4 @@ public class AutoCloseableElasticsearchClient extends ElasticsearchClient implem
public void close() throws Exception {
transport.close();
}
@Override
public ElasticsearchClusterClient cluster() {
return super.cluster();
}
}

View File

@ -365,7 +365,7 @@ class CriteriaQueryProcessor {
if (item != null) {
if (sb.length() > 0) {
if (!sb.isEmpty()) {
sb.append(' ');
}
sb.append('"');

View File

@ -19,7 +19,6 @@ import co.elastic.clients.json.JsonpMapper;
import jakarta.json.stream.JsonGenerator;
import java.io.ByteArrayOutputStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import org.apache.commons.logging.Log;
@ -44,17 +43,13 @@ final class JsonUtils {
mapper.serialize(object, generator);
generator.close();
String json = "{}";
try {
json = baos.toString("UTF-8");
} catch (UnsupportedEncodingException e) {
LOGGER.warn("could not read json", e);
}
json = baos.toString(StandardCharsets.UTF_8);
return json;
}
@Nullable
public static String queryToJson(@Nullable co.elastic.clients.elasticsearch._types.query_dsl.Query query, JsonpMapper mapper) {
public static String queryToJson(@Nullable co.elastic.clients.elasticsearch._types.query_dsl.Query query,
JsonpMapper mapper) {
if (query == null) {
return null;

View File

@ -47,8 +47,8 @@ public class NativeQueryBuilder extends BaseQueryBuilder<NativeQuery, NativeQuer
private final Map<String, Aggregation> aggregations = new LinkedHashMap<>();
@Nullable private Suggester suggester;
@Nullable private FieldCollapse fieldCollapse;
private List<SortOptions> sortOptions = new ArrayList<>();
private Map<String, JsonData> searchExtensions = new LinkedHashMap<>();
private final List<SortOptions> sortOptions = new ArrayList<>();
private final Map<String, JsonData> searchExtensions = new LinkedHashMap<>();
@Nullable private org.springframework.data.elasticsearch.core.query.Query springDataQuery;
@Nullable private KnnQuery knnQuery;

View File

@ -157,7 +157,6 @@ class RequestConverter {
aliasActions.getActions().forEach(aliasAction -> {
if (aliasAction instanceof AliasAction.Add add) {
var parameters = add.getParameters();
// noinspection DuplicatedCode
String[] parametersAliases = parameters.getAliases();
if (parametersAliases != null) {
for (String aliasName : parametersAliases) {
@ -173,7 +172,6 @@ class RequestConverter {
private Alias.Builder buildAlias(AliasActionParameters parameters, Alias.Builder aliasBuilder) {
// noinspection DuplicatedCode
if (parameters.getRouting() != null) {
aliasBuilder.routing(parameters.getRouting());
}
@ -416,7 +414,6 @@ class RequestConverter {
if (aliasActions != null) {
aliasActions.getActions().forEach(aliasAction -> {
AliasActionParameters parameters = aliasAction.getParameters();
// noinspection DuplicatedCode
String[] parametersAliases = parameters.getAliases();
if (parametersAliases != null) {
@ -450,7 +447,6 @@ class RequestConverter {
aliasActions.getActions().forEach(aliasAction -> {
if (aliasAction instanceof AliasAction.Add add) {
var parameters = add.getParameters();
// noinspection DuplicatedCode
String[] parametersAliases = parameters.getAliases();
if (parametersAliases != null) {
for (String aliasName : parametersAliases) {

View File

@ -15,9 +15,8 @@
*/
package org.springframework.data.elasticsearch.client.elc;
import static org.springframework.data.elasticsearch.client.elc.JsonUtils.toJson;
import static org.springframework.data.elasticsearch.client.elc.TypeUtils.removePrefixFromJson;
import static org.springframework.data.elasticsearch.client.elc.TypeUtils.typeMapping;
import static org.springframework.data.elasticsearch.client.elc.JsonUtils.*;
import static org.springframework.data.elasticsearch.client.elc.TypeUtils.*;
import co.elastic.clients.elasticsearch._types.BulkIndexByScrollFailure;
import co.elastic.clients.elasticsearch._types.ErrorCause;
@ -36,7 +35,12 @@ import co.elastic.clients.elasticsearch.indices.get_index_template.IndexTemplate
import co.elastic.clients.elasticsearch.indices.get_mapping.IndexMappingRecord;
import co.elastic.clients.json.JsonpMapper;
import java.util.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
@ -47,7 +51,11 @@ import org.springframework.data.elasticsearch.core.IndexInformation;
import org.springframework.data.elasticsearch.core.MultiGetItem;
import org.springframework.data.elasticsearch.core.cluster.ClusterHealth;
import org.springframework.data.elasticsearch.core.document.Document;
import org.springframework.data.elasticsearch.core.index.*;
import org.springframework.data.elasticsearch.core.index.AliasData;
import org.springframework.data.elasticsearch.core.index.Settings;
import org.springframework.data.elasticsearch.core.index.TemplateData;
import org.springframework.data.elasticsearch.core.index.TemplateResponse;
import org.springframework.data.elasticsearch.core.index.TemplateResponseData;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.ByQueryResponse;
import org.springframework.data.elasticsearch.core.query.StringQuery;
@ -182,7 +190,7 @@ class ResponseConverter {
Map<String, IndexMappingRecord> mappings = getMappingResponse.result();
if (mappings == null || mappings.size() == 0) {
if (mappings == null || mappings.isEmpty()) {
return Document.create();
}

View File

@ -299,6 +299,7 @@ public abstract class AbstractElasticsearchTemplate implements ElasticsearchOper
}
@Override
@Deprecated
public ByQueryResponse delete(Query query, Class<?> clazz) {
return delete(query, clazz, getIndexCoordinatesFor(clazz));
}

View File

@ -409,6 +409,7 @@ abstract public class AbstractReactiveElasticsearchTemplate
abstract protected Mono<String> doDeleteById(String id, @Nullable String routing, IndexCoordinates index);
@Override
@Deprecated
public Mono<ByQueryResponse> delete(Query query, Class<?> entityType) {
return delete(query, entityType, getIndexCoordinatesFor(entityType));
}

View File

@ -282,6 +282,7 @@ public interface DocumentOperations {
* @since 4.1
* @deprecated since 5.3.0, use {@link #delete(DeleteQuery, Class)}
*/
@Deprecated
ByQueryResponse delete(Query query, Class<?> clazz);
/**
@ -305,6 +306,7 @@ public interface DocumentOperations {
* @return response with detailed information
* @deprecated since 5.3.0, use {@link #delete(DeleteQuery, Class, IndexCoordinates)}
*/
@Deprecated
ByQueryResponse delete(Query query, Class<?> clazz, IndexCoordinates index);
/**

View File

@ -334,6 +334,7 @@ public interface ReactiveDocumentOperations {
* @return a {@link Mono} emitting the number of the removed documents.
* @deprecated since 5.3.0, use {@link #delete(DeleteQuery, Class)}
*/
@Deprecated
Mono<ByQueryResponse> delete(Query query, Class<?> entityType);
/**
@ -355,6 +356,7 @@ public interface ReactiveDocumentOperations {
* @return a {@link Mono} emitting the number of the removed documents.
* @deprecated since 5.3.0, use {@link #delete(DeleteQuery, Class, IndexCoordinates)}
*/
@Deprecated
Mono<ByQueryResponse> delete(Query query, Class<?> entityType, IndexCoordinates index);
/**

View File

@ -22,5 +22,5 @@ package org.springframework.data.elasticsearch.core;
* @since 4.2
*/
public enum RefreshPolicy {
NONE, IMMEDIATE, WAIT_UNTIL;
NONE, IMMEDIATE, WAIT_UNTIL
}

View File

@ -167,7 +167,7 @@ public class SearchHitMapping<T> {
Map<String, SearchHits<?>> innerHits = new LinkedHashMap<>();
Map<String, SearchDocumentResponse> documentInnerHits = searchDocument.getInnerHits();
if (documentInnerHits != null && documentInnerHits.size() > 0) {
if (documentInnerHits != null && !documentInnerHits.isEmpty()) {
SearchHitMapping<SearchDocument> searchDocumentSearchHitMapping = SearchHitMapping
.mappingFor(SearchDocument.class, converter);
@ -287,8 +287,8 @@ public class SearchHitMapping<T> {
}
private static class ElasticsearchPersistentEntityWithNestedMetaData {
@Nullable private ElasticsearchPersistentEntity<?> entity;
private NestedMetaData nestedMetaData;
@Nullable private final ElasticsearchPersistentEntity<?> entity;
private final NestedMetaData nestedMetaData;
public ElasticsearchPersistentEntityWithNestedMetaData(@Nullable ElasticsearchPersistentEntity<?> entity,
NestedMetaData nestedMetaData) {

View File

@ -23,8 +23,8 @@ import java.util.stream.Stream;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.repository.util.ReactiveWrappers;
import org.springframework.data.util.CloseableIterator;
import org.springframework.data.util.ReactiveWrappers;
import org.springframework.lang.Nullable;
/**

View File

@ -42,7 +42,7 @@ public class SearchHitsImpl<T> implements SearchScrollHits<T> {
private final Lazy<List<SearchHit<T>>> unmodifiableSearchHits;
@Nullable private final AggregationsContainer<?> aggregations;
@Nullable private final Suggest suggest;
@Nullable private String pointInTimeId;
@Nullable private final String pointInTimeId;
@Nullable private final SearchShardStatistics searchShardStatistics;
/**

View File

@ -61,10 +61,10 @@ abstract class StreamQueries {
return new SearchHitsIterator<>() {
private volatile AtomicInteger currentCount = new AtomicInteger();
private final AtomicInteger currentCount = new AtomicInteger();
private volatile Iterator<SearchHit<T>> currentScrollHits = searchHits.iterator();
private volatile boolean continueScroll = currentScrollHits.hasNext();
private volatile ScrollState scrollState = new ScrollState(searchHits.getScrollId());
private final ScrollState scrollState = new ScrollState(searchHits.getScrollId());
private volatile boolean isClosed = false;
@Override

View File

@ -110,6 +110,6 @@ public interface ElasticsearchConverter
* @return a String where the property names are replaced with field names
* @since 5.2
*/
public String updateFieldNames(String propertyPath, ElasticsearchPersistentEntity<?> persistentEntity);
String updateFieldNames(String propertyPath, ElasticsearchPersistentEntity<?> persistentEntity);
// endregion
}

View File

@ -344,7 +344,7 @@ public class GeoConverters {
String type = GeoConverters.getGeoJsonType(source);
Assert.isTrue(type.equalsIgnoreCase(GeoJsonPolygon.TYPE), "does not contain a type 'Polygon'");
List<GeoJsonLineString> lines = geoJsonLineStringsFromMap(source);
Assert.isTrue(lines.size() > 0, "no linestrings defined in polygon");
Assert.isTrue(!lines.isEmpty(), "no linestrings defined in polygon");
GeoJsonPolygon geoJsonPolygon = GeoJsonPolygon.of(lines.get(0));
for (int i = 1; i < lines.size(); i++) {
geoJsonPolygon = geoJsonPolygon.withInnerRing(lines.get(i));

View File

@ -34,6 +34,9 @@ import org.springframework.core.convert.ConversionService;
import org.springframework.core.convert.ConverterNotFoundException;
import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.core.env.Environment;
import org.springframework.core.env.EnvironmentCapable;
import org.springframework.core.env.StandardEnvironment;
import org.springframework.data.convert.CustomConversions;
import org.springframework.data.elasticsearch.annotations.FieldType;
import org.springframework.data.elasticsearch.annotations.ScriptedField;
@ -58,6 +61,7 @@ import org.springframework.data.mapping.SimplePropertyHandler;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mapping.model.*;
import org.springframework.data.util.TypeInformation;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.format.datetime.DateFormatterRegistrar;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
@ -65,8 +69,6 @@ import org.springframework.util.ClassUtils;
import org.springframework.util.CollectionUtils;
import org.springframework.util.ObjectUtils;
import javax.print.Doc;
/**
* Elasticsearch specific {@link org.springframework.data.convert.EntityConverter} implementation based on domain type
* {@link ElasticsearchPersistentEntity metadata}.
@ -86,7 +88,7 @@ import javax.print.Doc;
* @since 3.2
*/
public class MappingElasticsearchConverter
implements ElasticsearchConverter, ApplicationContextAware, InitializingBean {
implements ElasticsearchConverter, ApplicationContextAware, InitializingBean, EnvironmentCapable {
private static final String INCOMPATIBLE_TYPES = "Cannot convert %1$s of type %2$s into an instance of %3$s! Implement a custom Converter<%2$s, %3$s> and register it with the CustomConversions.";
private static final String INVALID_TYPE_TO_READ = "Expected to read Document %s into type %s but didn't find a PersistentEntity for the latter!";
@ -96,7 +98,14 @@ public class MappingElasticsearchConverter
private final MappingContext<? extends ElasticsearchPersistentEntity<?>, ElasticsearchPersistentProperty> mappingContext;
private final GenericConversionService conversionService;
private CustomConversions conversions = new ElasticsearchCustomConversions(Collections.emptyList());
protected @Nullable Environment environment;
private final SpELContext spELContext = new SpELContext(new MapAccessor());
private final SpelExpressionParser expressionParser = new SpelExpressionParser();
private final CachingValueExpressionEvaluatorFactory expressionEvaluatorFactory = new CachingValueExpressionEvaluatorFactory(
expressionParser, this, spELContext);
private final EntityInstantiators instantiators = new EntityInstantiators();
private final ElasticsearchTypeMapper typeMapper;
@ -124,6 +133,14 @@ public class MappingElasticsearchConverter
}
}
@Override
public Environment getEnvironment() {
if (environment == null) {
environment = new StandardEnvironment();
}
return environment;
}
@Override
public MappingContext<? extends ElasticsearchPersistentEntity<?>, ElasticsearchPersistentProperty> getMappingContext() {
return mappingContext;
@ -162,7 +179,8 @@ public class MappingElasticsearchConverter
@Override
public <R> R read(Class<R> type, Document source) {
Reader reader = new Reader(mappingContext, conversionService, conversions, typeMapper, spELContext, instantiators);
Reader reader = new Reader(mappingContext, conversionService, conversions, typeMapper, expressionEvaluatorFactory,
instantiators);
return reader.read(type, source);
}
@ -202,29 +220,29 @@ public class MappingElasticsearchConverter
*/
private static class Reader extends Base {
private final SpELContext spELContext;
private final EntityInstantiators instantiators;
private final CachingValueExpressionEvaluatorFactory expressionEvaluatorFactory;
public Reader(
MappingContext<? extends ElasticsearchPersistentEntity<?>, ElasticsearchPersistentProperty> mappingContext,
GenericConversionService conversionService, CustomConversions conversions, ElasticsearchTypeMapper typeMapper,
SpELContext spELContext, EntityInstantiators instantiators) {
CachingValueExpressionEvaluatorFactory expressionEvaluatorFactory, EntityInstantiators instantiators) {
super(mappingContext, conversionService, conversions, typeMapper);
this.spELContext = spELContext;
this.expressionEvaluatorFactory = expressionEvaluatorFactory;
this.instantiators = instantiators;
}
@SuppressWarnings("unchecked")
/**
* Reads the given source into the given type.
*
* @param type they type to convert the given source to.
* @param type the type to convert the given source to.
* @param source the source to create an object of the given type from.
* @return the object that was read
*/
<R> R read(Class<R> type, Document source) {
// noinspection unchecked
TypeInformation<R> typeInformation = TypeInformation.of((Class<R>) ClassUtils.getUserClass(type));
R r = read(typeInformation, source);
@ -316,8 +334,7 @@ public class MappingElasticsearchConverter
private <R> R readEntity(ElasticsearchPersistentEntity<?> entity, Map<String, Object> source) {
ElasticsearchPersistentEntity<?> targetEntity = computeClosestEntity(entity, source);
SpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(source, spELContext);
ValueExpressionEvaluator evaluator = expressionEvaluatorFactory.create(source);
MapValueAccessor accessor = new MapValueAccessor(source);
InstanceCreatorMetadata<?> creatorMetadata = entity.getInstanceCreatorMetadata();
@ -384,7 +401,7 @@ public class MappingElasticsearchConverter
}
private ParameterValueProvider<ElasticsearchPersistentProperty> getParameterProvider(
ElasticsearchPersistentEntity<?> entity, MapValueAccessor source, SpELExpressionEvaluator evaluator) {
ElasticsearchPersistentEntity<?> entity, MapValueAccessor source, ValueExpressionEvaluator evaluator) {
ElasticsearchPropertyValueProvider provider = new ElasticsearchPropertyValueProvider(source, evaluator);
@ -393,7 +410,7 @@ public class MappingElasticsearchConverter
PersistentEntityParameterValueProvider<ElasticsearchPersistentProperty> parameterProvider = new PersistentEntityParameterValueProvider<>(
entity, provider, null);
return new ConverterAwareSpELExpressionParameterValueProvider(evaluator, conversionService, parameterProvider);
return new ConverterAwareValueExpressionParameterValueProvider(evaluator, conversionService, parameterProvider);
}
private boolean isAssignedSeqNo(long seqNo) {
@ -475,7 +492,7 @@ public class MappingElasticsearchConverter
TypeInformation<?> collectionComponentType = getCollectionComponentType(type);
if (collectionComponentType != null) {
Object o = read(collectionComponentType, (Map<String, Object>) value);
return getCollectionWithSingleElement(type, collectionComponentType, o);
return (o != null) ? getCollectionWithSingleElement(type, collectionComponentType, o) : null;
}
return (T) read(type, (Map<String, Object>) value);
} else {
@ -484,7 +501,7 @@ public class MappingElasticsearchConverter
if (collectionComponentType != null
&& collectionComponentType.isAssignableFrom(TypeInformation.of(value.getClass()))) {
Object o = getPotentiallyConvertedSimpleRead(value, collectionComponentType);
return getCollectionWithSingleElement(type, collectionComponentType, o);
return (o != null) ? getCollectionWithSingleElement(type, collectionComponentType, o) : null;
}
return (T) getPotentiallyConvertedSimpleRead(value, rawType);
@ -502,7 +519,7 @@ public class MappingElasticsearchConverter
/**
* @param type the type to check
* @return true if type is a collectoin, null otherwise,
* @return the collection type if type is a collection, null otherwise,
*/
@Nullable
TypeInformation<?> getCollectionComponentType(TypeInformation<?> type) {
@ -618,9 +635,10 @@ public class MappingElasticsearchConverter
* but will be removed from spring-data-commons, so we do it here
*/
@Nullable
private Object convertFromCollectionToObject(Object value, @Nullable Class<?> target) {
private Object convertFromCollectionToObject(Object value, Class<?> target) {
if (value.getClass().isArray()) {
// noinspection ArraysAsListWithZeroOrOneArgument
value = Arrays.asList(value);
}
@ -670,9 +688,9 @@ public class MappingElasticsearchConverter
class ElasticsearchPropertyValueProvider implements PropertyValueProvider<ElasticsearchPersistentProperty> {
final MapValueAccessor accessor;
final SpELExpressionEvaluator evaluator;
final ValueExpressionEvaluator evaluator;
ElasticsearchPropertyValueProvider(MapValueAccessor accessor, SpELExpressionEvaluator evaluator) {
ElasticsearchPropertyValueProvider(MapValueAccessor accessor, ValueExpressionEvaluator evaluator) {
this.accessor = accessor;
this.evaluator = evaluator;
}
@ -692,33 +710,29 @@ public class MappingElasticsearchConverter
}
/**
* Extension of {@link SpELExpressionParameterValueProvider} to recursively trigger value conversion on the raw
* Extension of {@link ValueExpressionParameterValueProvider} to recursively trigger value conversion on the raw
* resolved SpEL value.
*
* @author Mark Paluch
*/
private class ConverterAwareSpELExpressionParameterValueProvider
extends SpELExpressionParameterValueProvider<ElasticsearchPersistentProperty> {
private class ConverterAwareValueExpressionParameterValueProvider
extends ValueExpressionParameterValueProvider<ElasticsearchPersistentProperty> {
/**
* Creates a new {@link ConverterAwareSpELExpressionParameterValueProvider}.
* Creates a new {@link ConverterAwareValueExpressionParameterValueProvider}.
*
* @param evaluator must not be {@literal null}.
* @param conversionService must not be {@literal null}.
* @param delegate must not be {@literal null}.
*/
public ConverterAwareSpELExpressionParameterValueProvider(SpELExpressionEvaluator evaluator,
public ConverterAwareValueExpressionParameterValueProvider(ValueExpressionEvaluator evaluator,
ConversionService conversionService, ParameterValueProvider<ElasticsearchPersistentProperty> delegate) {
super(evaluator, conversionService, delegate);
}
/*
* (non-Javadoc)
* @see org.springframework.data.mapping.model.SpELExpressionParameterValueProvider#potentiallyConvertSpelValue(java.lang.Object, org.springframework.data.mapping.PreferredConstructor.Parameter)
*/
@Override
protected <T> T potentiallyConvertSpelValue(Object object,
protected <T> T potentiallyConvertExpressionValue(Object object,
Parameter<T, ElasticsearchPersistentProperty> parameter) {
return readValue(object, parameter.getType());
}
@ -995,12 +1009,8 @@ public class MappingElasticsearchConverter
private static boolean hasEmptyValue(Object value) {
if (value instanceof String s && s.isEmpty() || value instanceof Collection<?> c && c.isEmpty()
|| value instanceof Map<?, ?> m && m.isEmpty()) {
return true;
}
return false;
return value instanceof String s && s.isEmpty() || value instanceof Collection<?> c && c.isEmpty()
|| value instanceof Map<?, ?> m && m.isEmpty();
}
@SuppressWarnings("unchecked")
@ -1402,12 +1412,18 @@ public class MappingElasticsearchConverter
if (properties.length > 1) {
var persistentProperty = persistentEntity.getPersistentProperty(propertyName);
return (persistentProperty != null)
? fieldName + "." + updateFieldNames(properties[1], mappingContext.getPersistentEntity(persistentProperty))
: fieldName;
} else {
return fieldName;
if (persistentProperty != null) {
ElasticsearchPersistentEntity<?> nestedPersistentEntity = mappingContext
.getPersistentEntity(persistentProperty);
if (nestedPersistentEntity != null) {
return fieldName + '.' + updateFieldNames(properties[1], nestedPersistentEntity);
} else {
return fieldName;
}
}
}
return fieldName;
} else {
return propertyPath;
}
@ -1416,6 +1432,7 @@ public class MappingElasticsearchConverter
// endregion
@SuppressWarnings("ClassCanBeRecord")
static class MapValueAccessor {
final Map<String, Object> target;

View File

@ -18,11 +18,7 @@ package org.springframework.data.elasticsearch.core.document;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.BooleanSupplier;
import java.util.function.Function;
import java.util.function.IntSupplier;
import java.util.function.LongSupplier;
import java.util.function.Supplier;
import org.springframework.data.elasticsearch.core.convert.ConversionException;
import org.springframework.data.elasticsearch.support.StringObjectMap;
@ -30,8 +26,8 @@ import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
/**
* A representation of an Elasticsearch document as extended {@link StringObjectMap Map}. All iterators preserve original
* insertion order.
* A representation of an Elasticsearch document as extended {@link StringObjectMap Map}. All iterators preserve
* original insertion order.
* <p>
* Document does not allow {@code null} keys. It allows {@literal null} values.
* <p>
@ -60,7 +56,7 @@ public interface Document extends StringObjectMap<Document> {
* @param map source map containing key-value pairs and sub-documents. must not be {@literal null}.
* @return a new {@link Document}.
*/
static Document from(Map<String, ? extends Object> map) {
static Document from(Map<String, ?> map) {
Assert.notNull(map, "Map must not be null");

View File

@ -24,8 +24,8 @@ import org.springframework.data.geo.Box;
*/
public class GeoBox {
private GeoPoint topLeft;
private GeoPoint bottomRight;
private final GeoPoint topLeft;
private final GeoPoint bottomRight;
public GeoBox(GeoPoint topLeft, GeoPoint bottomRight) {
this.topLeft = topLeft;

View File

@ -33,7 +33,7 @@ public class GeoJsonMultiPolygon implements GeoJson<Iterable<GeoJsonPolygon>> {
public static final String TYPE = "MultiPolygon";
private List<GeoJsonPolygon> coordinates = new ArrayList<>();
private final List<GeoJsonPolygon> coordinates = new ArrayList<>();
private GeoJsonMultiPolygon(List<GeoJsonPolygon> polygons) {
this.coordinates.addAll(polygons);

View File

@ -443,7 +443,7 @@ public class MappingBuilder {
contextNode.put(FIELD_CONTEXT_NAME, context.name());
contextNode.put(FIELD_CONTEXT_TYPE, context.type().getMappedName());
if (context.precision().length() > 0) {
if (!context.precision().isEmpty()) {
contextNode.put(FIELD_CONTEXT_PRECISION, context.precision());
}

View File

@ -17,7 +17,7 @@ package org.springframework.data.elasticsearch.core.join;
import java.util.Objects;
import org.springframework.data.annotation.PersistenceConstructor;
import org.springframework.data.annotation.PersistenceCreator;
import org.springframework.lang.Nullable;
/**
@ -39,7 +39,7 @@ public class JoinField<ID> {
this(name, null);
}
@PersistenceConstructor
@PersistenceCreator
public JoinField(String name, @Nullable ID parent) {
this.name = name;
this.parent = parent;

View File

@ -73,8 +73,8 @@ public class SimpleElasticsearchPersistentEntity<T> extends BasicPersistentEntit
private @Nullable ElasticsearchPersistentProperty joinFieldProperty;
private @Nullable ElasticsearchPersistentProperty indexedIndexNameProperty;
private @Nullable Document.VersionType versionType;
private boolean createIndexAndMapping;
private boolean alwaysWriteMapping;
private final boolean createIndexAndMapping;
private final boolean alwaysWriteMapping;
private final Dynamic dynamic;
private final Map<String, ElasticsearchPersistentProperty> fieldNamePropertyCache = new ConcurrentHashMap<>();
private final ConcurrentHashMap<String, Expression> routingExpressions = new ConcurrentHashMap<>();

View File

@ -230,11 +230,17 @@ public class ByQueryResponse {
return aborted;
}
@Nullable
public ElasticsearchErrorCause getElasticsearchErrorCause() {
return elasticsearchErrorCause;
}
/**
* Create a new {@link FailureBuilder} to build {@link Failure}
*
* @return a new {@link FailureBuilder} to build {@link Failure}
*/
public static FailureBuilder builder() {
return new FailureBuilder();
}

View File

@ -673,8 +673,8 @@ public class Criteria {
*/
public Criteria boundedBy(String topLeftGeohash, String bottomRightGeohash) {
Assert.isTrue(!StringUtils.isEmpty(topLeftGeohash), "topLeftGeohash must not be empty");
Assert.isTrue(!StringUtils.isEmpty(bottomRightGeohash), "bottomRightGeohash must not be empty");
Assert.isTrue(StringUtils.hasLength(topLeftGeohash), "topLeftGeohash must not be empty");
Assert.isTrue(StringUtils.hasLength(bottomRightGeohash), "bottomRightGeohash must not be empty");
filterCriteriaEntries
.add(new CriteriaEntry(OperationKey.BBOX, new Object[] { topLeftGeohash, bottomRightGeohash }));
@ -757,7 +757,7 @@ public class Criteria {
*/
public Criteria within(String geoLocation, String distance) {
Assert.isTrue(!StringUtils.isEmpty(geoLocation), "geoLocation value must not be null");
Assert.isTrue(StringUtils.hasLength(geoLocation), "geoLocation value must not be null");
filterCriteriaEntries.add(new CriteriaEntry(OperationKey.WITHIN, new Object[] { geoLocation, distance }));
return this;

View File

@ -23,8 +23,8 @@ package org.springframework.data.elasticsearch.core.query;
*/
public class IndexBoost {
private String indexName;
private float boost;
private final String indexName;
private final float boost;
public IndexBoost(String indexName, float boost) {
this.indexName = indexName;

View File

@ -25,8 +25,8 @@ import java.util.EnumSet;
*/
public class IndicesOptions {
private EnumSet<Option> options;
private EnumSet<WildcardStates> expandWildcards;
private final EnumSet<Option> options;
private final EnumSet<WildcardStates> expandWildcards;
public static final IndicesOptions STRICT_EXPAND_OPEN = new IndicesOptions(
EnumSet.of(IndicesOptions.Option.ALLOW_NO_INDICES), EnumSet.of(IndicesOptions.WildcardStates.OPEN));
@ -101,10 +101,10 @@ public class IndicesOptions {
}
public enum WildcardStates {
OPEN, CLOSED, HIDDEN, ALL, NONE;
OPEN, CLOSED, HIDDEN, ALL, NONE
}
public enum Option {
IGNORE_UNAVAILABLE, IGNORE_ALIASES, ALLOW_NO_INDICES, FORBID_ALIASES_TO_MULTIPLE_INDICES, FORBID_CLOSED_INDICES, IGNORE_THROTTLED;
IGNORE_UNAVAILABLE, IGNORE_ALIASES, ALLOW_NO_INDICES, FORBID_ALIASES_TO_MULTIPLE_INDICES, FORBID_CLOSED_INDICES, IGNORE_THROTTLED
}
}

View File

@ -15,13 +15,12 @@
*/
package org.springframework.data.elasticsearch.core.query;
import java.util.function.Function;
import org.springframework.data.domain.Sort;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import java.util.function.BiFunction;
import java.util.function.Function;
/**
* Extends the {@link Sort.Order} with properties that can be set on Elasticsearch order options.
*
@ -143,18 +142,19 @@ public class Order extends Sort.Order {
}
public static class Nested {
private String path;
@Nullable private Query filter;
private final String path;
@Nullable private final Query filter;
@Nullable private Integer maxChildren = null;
@Nullable private Nested nested;
@Nullable private final Nested nested;
public static Nested of(String path, Function<Nested.Builder, Nested.Builder> builderFunction) {
Assert.notNull(path, "path must not be null");
Assert.notNull(builderFunction, "builderFunction must not be null");
Assert.notNull(path, "path must not be null");
Assert.notNull(builderFunction, "builderFunction must not be null");
return builderFunction.apply(builder(path)).build();
return builderFunction.apply(builder(path)).build();
}
public Nested(String path, @Nullable Query filter, @Nullable Integer maxChildren, @Nullable Nested nested) {
Assert.notNull(path, "path must not be null");
@ -189,7 +189,7 @@ public class Order extends Sort.Order {
}
public static class Builder {
private String path;
private final String path;
@Nullable private Query filter = null;
@Nullable private Integer maxChildren = null;
@Nullable private Nested nested = null;
@ -203,8 +203,9 @@ public class Order extends Sort.Order {
/**
* Sets the filter query for a nested sort.<br/>
* Note: This cannot be a {@link CriteriaQuery}, as that would be sent as a nested query within the filter,
* use a {@link org.springframework.data.elasticsearch.client.elc.NativeQuery} or {@link StringQuery} instead.
* Note: This cannot be a {@link CriteriaQuery}, as that would be sent as a nested query within the filter, use a
* {@link org.springframework.data.elasticsearch.client.elc.NativeQuery} or {@link StringQuery} instead.
*
* @param filter the filter to set
* @return this builder
* @throws IllegalArgumentException when a {@link CriteriaQuery} is passed.

View File

@ -447,7 +447,7 @@ public interface Query {
@Nullable
default PointInTime getPointInTime() {
return null;
};
}
/**
* returns the number of documents that are requested when the reactive code does a batched search operation. This is
@ -469,7 +469,8 @@ public interface Query {
/**
* @since 5.1
*/
@Nullable EnumSet<IndicesOptions.WildcardStates> getExpandWildcards();
@Nullable
EnumSet<IndicesOptions.WildcardStates> getExpandWildcards();
/**
* @return a possible empty list of docvalue_field values to be set on the query.

View File

@ -26,7 +26,7 @@ import org.springframework.util.Assert;
*/
public class UpdateResponse {
private Result result;
private final Result result;
public UpdateResponse(Result result) {
@ -47,6 +47,6 @@ public class UpdateResponse {
}
public enum Result {
CREATED, UPDATED, DELETED, NOT_FOUND, NOOP;
CREATED, UPDATED, DELETED, NOT_FOUND, NOOP
}
}

View File

@ -22,6 +22,7 @@ import org.springframework.data.elasticsearch.core.SearchHits;
import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.BaseQuery;
import org.springframework.data.elasticsearch.core.query.DeleteQuery;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.data.repository.query.ParametersParameterAccessor;
import org.springframework.data.repository.query.QueryMethod;
@ -94,7 +95,7 @@ public abstract class AbstractElasticsearchRepositoryQuery implements Repository
if (isDeleteQuery()) {
result = countOrGetDocumentsForDelete(query, parameterAccessor);
elasticsearchOperations.delete(query, clazz, index);
elasticsearchOperations.delete(DeleteQuery.builder(query).build(), clazz, index);
elasticsearchOperations.indexOps(index).refresh();
} else if (isCountQuery()) {
result = elasticsearchOperations.count(query, clazz, index);

View File

@ -28,6 +28,7 @@ import org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersiste
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.BaseQuery;
import org.springframework.data.elasticsearch.core.query.ByQueryResponse;
import org.springframework.data.elasticsearch.core.query.DeleteQuery;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.data.elasticsearch.repository.query.ReactiveElasticsearchQueryExecution.ResultProcessingConverter;
import org.springframework.data.elasticsearch.repository.query.ReactiveElasticsearchQueryExecution.ResultProcessingExecution;
@ -133,7 +134,8 @@ abstract class AbstractReactiveElasticsearchRepositoryQuery implements Repositor
ReactiveElasticsearchOperations operations) {
if (isDeleteQuery()) {
return (query, type, targetType, indexCoordinates) -> operations.delete(query, type, indexCoordinates)
return (query, type, targetType, indexCoordinates) -> operations
.delete(DeleteQuery.builder(query).build(), type, indexCoordinates)
.map(ByQueryResponse::getDeleted);
} else if (isCountQuery()) {
return (query, type, targetType, indexCoordinates) -> operations.count(query, type, indexCoordinates);

View File

@ -29,7 +29,7 @@ import org.springframework.data.util.TypeInformation;
* @author Peter-Josef Meisch
* @since 5.2
*/
class ElasticsearchParameter extends Parameter {
public class ElasticsearchParameter extends Parameter {
/**
* Creates a new {@link ElasticsearchParameter}.

View File

@ -15,12 +15,12 @@
*/
package org.springframework.data.elasticsearch.repository.query;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import org.springframework.core.MethodParameter;
import org.springframework.data.repository.query.Parameters;
import org.springframework.data.repository.query.ParametersSource;
import org.springframework.data.util.TypeInformation;
/**
@ -33,10 +33,13 @@ public class ElasticsearchParameters extends Parameters<ElasticsearchParameters,
private final List<ElasticsearchParameter> scriptedFields = new ArrayList<>();
private final List<ElasticsearchParameter> runtimeFields = new ArrayList<>();
public ElasticsearchParameters(Method method, TypeInformation<?> domainType) {
public ElasticsearchParameters(ParametersSource parametersSource) {
super(method, parameter -> new ElasticsearchParameter(parameter, domainType));
super(parametersSource,
parameter -> new ElasticsearchParameter(parameter, parametersSource.getDomainTypeInformation()));
var domainType = parametersSource.getDomainTypeInformation();
var method = parametersSource.getMethod();
int parameterCount = method.getParameterCount();
for (int i = 0; i < parameterCount; i++) {
MethodParameter methodParameter = new MethodParameter(method, i);
@ -50,7 +53,6 @@ public class ElasticsearchParameters extends Parameters<ElasticsearchParameters,
runtimeFields.add(parameter);
}
}
}
private ElasticsearchParameter parameterFactory(MethodParameter methodParameter, TypeInformation<?> domainType) {

View File

@ -47,6 +47,7 @@ import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.projection.ProjectionFactory;
import org.springframework.data.repository.core.RepositoryMetadata;
import org.springframework.data.repository.query.Parameters;
import org.springframework.data.repository.query.ParametersSource;
import org.springframework.data.repository.query.QueryMethod;
import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider;
import org.springframework.data.repository.util.QueryExecutionConverters;
@ -101,9 +102,16 @@ public class ElasticsearchQueryMethod extends QueryMethod {
verifyCountQueryTypes();
}
@SuppressWarnings("removal")
@Override
@Deprecated
protected Parameters<?, ?> createParameters(Method method, TypeInformation<?> domainType) {
return new ElasticsearchParameters(method, domainType);
return new ElasticsearchParameters(ParametersSource.of(method));
}
@Override
protected Parameters<?, ?> createParameters(ParametersSource parametersSource) {
return new ElasticsearchParameters(parametersSource);
}
protected void verifyCountQueryTypes() {
@ -347,7 +355,7 @@ public class ElasticsearchQueryMethod extends QueryMethod {
/*
* Copied from the QueryMethod class adding support for collections of SearchHit instances. No static method here.
*/
private Class<? extends Object> potentiallyUnwrapReturnTypeFor(RepositoryMetadata metadata, Method method) {
private Class<?> potentiallyUnwrapReturnTypeFor(RepositoryMetadata metadata, Method method) {
TypeInformation<?> returnType = metadata.getReturnType(method);
if (!QueryExecutionConverters.supports(returnType.getType())
&& !ReactiveWrapperConverters.supports(returnType.getType())) {

View File

@ -22,7 +22,7 @@ import java.util.ArrayList;
import java.util.List;
import org.springframework.data.repository.util.ReactiveWrapperConverters;
import org.springframework.data.repository.util.ReactiveWrappers;
import org.springframework.data.util.ReactiveWrappers;
/**
* @author Christoph Strobl

View File

@ -101,11 +101,6 @@ public class ReactiveElasticsearchQueryMethod extends ElasticsearchQueryMethod {
}
}
@Override
protected ElasticsearchParameters createParameters(Method method, TypeInformation<?> domainType) {
return new ElasticsearchParameters(method, domainType);
}
/**
* Check if the given {@link org.springframework.data.repository.query.QueryMethod} receives a reactive parameter
* wrapper as one of its parameters.

View File

@ -38,6 +38,7 @@ import org.springframework.data.elasticsearch.core.SearchPage;
import org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersistentEntity;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.BaseQuery;
import org.springframework.data.elasticsearch.core.query.DeleteQuery;
import org.springframework.data.elasticsearch.core.query.MoreLikeThisQuery;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.data.elasticsearch.core.routing.RoutingResolver;
@ -186,7 +187,6 @@ public class SimpleElasticsearchRepository<T, ID> implements ElasticsearchReposi
Assert.notNull(entity, "Cannot save 'null' entity.");
// noinspection DataFlowIssue
return executeAndRefresh(operations -> operations.save(entity, getIndexCoordinates()));
}
@ -195,7 +195,6 @@ public class SimpleElasticsearchRepository<T, ID> implements ElasticsearchReposi
Assert.notNull(entity, "entity must not be null");
// noinspection DataFlowIssue
return executeAndRefresh(operations -> operations.save(entity, getIndexCoordinates()), refreshPolicy);
}
@ -309,7 +308,7 @@ public class SimpleElasticsearchRepository<T, ID> implements ElasticsearchReposi
Query query = operations.idsQuery(idStrings);
executeAndRefresh((OperationsCallback<Void>) operations -> {
operations.delete(query, entityClass, getIndexCoordinates());
operations.delete(DeleteQuery.builder(query).build(), entityClass, getIndexCoordinates());
return null;
});
}
@ -331,7 +330,7 @@ public class SimpleElasticsearchRepository<T, ID> implements ElasticsearchReposi
Query query = operations.idsQuery(idStrings);
executeAndRefresh((OperationsCallback<Void>) operations -> {
operations.delete(query, entityClass, getIndexCoordinates());
operations.delete(DeleteQuery.builder(query).build(), entityClass, getIndexCoordinates());
return null;
});
}
@ -365,7 +364,6 @@ public class SimpleElasticsearchRepository<T, ID> implements ElasticsearchReposi
if (id != null) {
executeAndRefresh(operations -> {
var ops = routing != null ? operations.withRouting(RoutingResolver.just(routing)) : operations;
// noinspection DataFlowIssue
return ops.delete(stringIdRepresentation(id), indexCoordinates);
});
}
@ -377,7 +375,6 @@ public class SimpleElasticsearchRepository<T, ID> implements ElasticsearchReposi
if (id != null) {
executeAndRefresh(operations -> {
var ops = routing != null ? operations.withRouting(RoutingResolver.just(routing)) : operations;
// noinspection DataFlowIssue
return ops.delete(stringIdRepresentation(id), indexCoordinates);
}, refreshPolicy);
}
@ -395,7 +392,7 @@ public class SimpleElasticsearchRepository<T, ID> implements ElasticsearchReposi
@Override
public void deleteAll(@Nullable RefreshPolicy refreshPolicy) {
executeAndRefresh((OperationsCallback<Void>) operations -> {
operations.delete(Query.findAll(), entityClass, getIndexCoordinates());
operations.delete(DeleteQuery.builder(Query.findAll()).build(), entityClass, getIndexCoordinates());
return null;
}, refreshPolicy);
}

View File

@ -32,6 +32,7 @@ import org.springframework.data.elasticsearch.core.SearchHit;
import org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersistentEntity;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.BaseQuery;
import org.springframework.data.elasticsearch.core.query.DeleteQuery;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.data.elasticsearch.core.routing.RoutingResolver;
import org.springframework.data.elasticsearch.repository.ReactiveElasticsearchRepository;
@ -286,7 +287,8 @@ public class SimpleReactiveElasticsearchRepository<T, ID> implements ReactiveEla
.collectList() //
.map(operations::idsQuery) //
.flatMap(
query -> operations.delete(query, entityInformation.getJavaType(), entityInformation.getIndexCoordinates())) //
query -> operations.delete(DeleteQuery.builder(query).build(), entityInformation.getJavaType(),
entityInformation.getIndexCoordinates())) //
.then(doRefresh());
}
@ -301,7 +303,8 @@ public class SimpleReactiveElasticsearchRepository<T, ID> implements ReactiveEla
.collectList() //
.map(operations::idsQuery) //
.flatMap(
query -> operationsWithRefreshPolicy.delete(query, entityInformation.getJavaType(),
query -> operationsWithRefreshPolicy.delete(DeleteQuery.builder(query).build(),
entityInformation.getJavaType(),
entityInformation.getIndexCoordinates())) //
.then(doRefresh());
}
@ -333,7 +336,8 @@ public class SimpleReactiveElasticsearchRepository<T, ID> implements ReactiveEla
.collectList() //
.map(operations::idsQuery)
.flatMap(
query -> operations.delete(query, entityInformation.getJavaType(), entityInformation.getIndexCoordinates())) //
query -> operations.delete(DeleteQuery.builder(query).build(), entityInformation.getJavaType(),
entityInformation.getIndexCoordinates())) //
.then(doRefresh());
}
@ -349,21 +353,25 @@ public class SimpleReactiveElasticsearchRepository<T, ID> implements ReactiveEla
.collectList() //
.map(operations::idsQuery)
.flatMap(
query -> operationsWithRefreshPolicy.delete(query, entityInformation.getJavaType(),
query -> operationsWithRefreshPolicy.delete(DeleteQuery.builder(query).build(),
entityInformation.getJavaType(),
entityInformation.getIndexCoordinates())) //
.then(doRefresh());
}
@Override
public Mono<Void> deleteAll() {
return operations.delete(Query.findAll(), entityInformation.getJavaType(), entityInformation.getIndexCoordinates()) //
return operations
.delete(DeleteQuery.builder(Query.findAll()).build(), entityInformation.getJavaType(),
entityInformation.getIndexCoordinates()) //
.then(doRefresh());
}
@Override
public Mono<Void> deleteAll(@Nullable RefreshPolicy refreshPolicy) {
return operations.withRefreshPolicy(refreshPolicy)
.delete(Query.findAll(), entityInformation.getJavaType(), entityInformation.getIndexCoordinates()) //
.delete(DeleteQuery.builder(Query.findAll()).build(), entityInformation.getJavaType(),
entityInformation.getIndexCoordinates()) //
.then(doRefresh());
}

View File

@ -40,7 +40,7 @@ public class DefaultStringObjectMap<T extends StringObjectMap<T>> implements Str
this(new LinkedHashMap<>());
}
public DefaultStringObjectMap(Map<String, ? extends Object> map) {
public DefaultStringObjectMap(Map<String, ?> map) {
this.delegate = new LinkedHashMap<>(map);
}

View File

@ -1,11 +1,9 @@
@file:Suppress("unused")
package org.springframework.data.elasticsearch.core
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates
import org.springframework.data.elasticsearch.core.query.BulkOptions
import org.springframework.data.elasticsearch.core.query.ByQueryResponse
import org.springframework.data.elasticsearch.core.query.IndexQuery
import org.springframework.data.elasticsearch.core.query.Query
import org.springframework.data.elasticsearch.core.query.UpdateQuery
import org.springframework.data.elasticsearch.core.query.*
/**
* Extension functions for [DocumentOperations] methods that take a Class parameter leveraging reified type parameters.
@ -39,8 +37,8 @@ inline fun <reified T : Any> DocumentOperations.bulkIndex(
inline fun <reified T : Any> DocumentOperations.bulkUpdate(queries: List<UpdateQuery>) =
bulkUpdate(queries, T::class.java)
inline fun <reified T : Any> DocumentOperations.delete(id: String): String =
inline fun <reified T : Any> DocumentOperations.deleteById(id: String): String =
delete(id, T::class.java)
inline fun <reified T : Any> DocumentOperations.delete(query: Query): ByQueryResponse =
delete(query, T::class.java)
inline fun <reified T : Any> DocumentOperations.deleteByQuery(query: Query): ByQueryResponse =
delete(DeleteQuery.builder(query).build(), T::class.java)

View File

@ -3,13 +3,13 @@ package org.springframework.data.elasticsearch.core
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates
/**
* Extension functions for [ReacctiveElasticsearchOperations] methods that take a Class parameter leveraging reified type parameters.
* Extension functions for [ReactiveElasticsearchOperations] methods that take a Class parameter leveraging reified type parameters.
* @author Peter-Josef Meisch
* @since 5.2
*/
inline fun <reified T : Any> ReactiveElasticsearchOperations.indexOps(): ReactiveIndexOperations =
indexOps(T::class.java)
indexOps(T::class.java)
inline fun <reified T : Any> ReactiveElasticsearchOperations.getIndexCoordinatesFor(): IndexCoordinates =
getIndexCoordinatesFor(T::class.java)
getIndexCoordinatesFor(T::class.java)

View File

@ -1,3 +1,5 @@
@file:Suppress("unused")
package org.springframework.data.elasticsearch.core
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates
@ -14,13 +16,38 @@ import reactor.core.publisher.Mono
inline fun <reified T : Any> ReactiveSearchOperations.count(): Mono<Long> = count(T::class.java)
inline fun <reified T : Any> ReactiveSearchOperations.count(query: Query): Mono<Long> = count(query, T::class.java)
inline fun <reified T : Any> ReactiveSearchOperations.count(query: Query, index: IndexCoordinates): Mono<Long> = count(query, T::class.java, index)
inline fun <reified T : Any> ReactiveSearchOperations.search(query: Query): Flux<SearchHit<T>> = search(query, T::class.java)
inline fun <reified T : Any> ReactiveSearchOperations.search(query: Query, index: IndexCoordinates): Flux<SearchHit<T>> = search(query, T::class.java, index)
inline fun <reified T : Any> ReactiveSearchOperations.searchForPage(query: Query): Mono<SearchPage<T>> = searchForPage(query, T::class.java)
inline fun <reified T : Any> ReactiveSearchOperations.searchForPage(query: Query, index: IndexCoordinates): Mono<SearchPage<T>> = searchForPage(query, T::class.java, index)
inline fun <reified T : Any> ReactiveSearchOperations.searchForHits(query: Query): Mono<ReactiveSearchHits<T>> = searchForHits(query, T::class.java)
inline fun <reified T : Any> ReactiveSearchOperations.aggregate(query: Query): Flux<out AggregationContainer<*>> = aggregate(query, T::class.java)
inline fun <reified T : Any> ReactiveSearchOperations.aggregate(query: Query, index: IndexCoordinates): Flux<out AggregationContainer<*>> = aggregate(query, T::class.java, index)
inline fun <reified T : Any> ReactiveSearchOperations.suggest(query: Query): Mono<Suggest> = suggest(query, T::class.java)
inline fun <reified T : Any> ReactiveSearchOperations.suggest(query: Query, index: IndexCoordinates): Mono<Suggest> = suggest(query, T::class.java, index)
inline fun <reified T : Any> ReactiveSearchOperations.count(query: Query, index: IndexCoordinates): Mono<Long> =
count(query, T::class.java, index)
inline fun <reified T : Any> ReactiveSearchOperations.search(query: Query): Flux<SearchHit<T>> =
search(query, T::class.java)
inline fun <reified T : Any> ReactiveSearchOperations.search(
query: Query,
index: IndexCoordinates
): Flux<SearchHit<T>> = search(query, T::class.java, index)
inline fun <reified T : Any> ReactiveSearchOperations.searchForPage(query: Query): Mono<SearchPage<T>> =
searchForPage(query, T::class.java)
inline fun <reified T : Any> ReactiveSearchOperations.searchForPage(
query: Query,
index: IndexCoordinates
): Mono<SearchPage<T>> = searchForPage(query, T::class.java, index)
inline fun <reified T : Any> ReactiveSearchOperations.searchForHits(query: Query): Mono<ReactiveSearchHits<T>> =
searchForHits(query, T::class.java)
inline fun <reified T : Any> ReactiveSearchOperations.aggregate(query: Query): Flux<out AggregationContainer<*>> =
aggregate(query, T::class.java)
inline fun <reified T : Any> ReactiveSearchOperations.aggregate(
query: Query,
index: IndexCoordinates
): Flux<out AggregationContainer<*>> = aggregate(query, T::class.java, index)
inline fun <reified T : Any> ReactiveSearchOperations.suggest(query: Query): Mono<Suggest> =
suggest(query, T::class.java)
inline fun <reified T : Any> ReactiveSearchOperations.suggest(query: Query, index: IndexCoordinates): Mono<Suggest> =
suggest(query, T::class.java, index)

View File

@ -13,6 +13,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@file:Suppress("unused")
package org.springframework.data.elasticsearch.core
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates
@ -28,27 +30,35 @@ import org.springframework.data.elasticsearch.core.query.Query
inline fun <reified T : Any> SearchOperations.count(query: Query): Long = count(query, T::class.java)
inline fun <reified T : Any> SearchOperations.searchOne(query: Query): SearchHit<T>? = searchOne(query, T::class.java)
inline fun <reified T : Any> SearchOperations.searchOne(query: Query, index: IndexCoordinates): SearchHit<T>? = searchOne(query, T::class.java, index)
inline fun <reified T : Any> SearchOperations.multiSearch(queries: List<out Query>): List<SearchHits<T>> =
multiSearch(queries, T::class.java)
inline fun <reified T : Any> SearchOperations.searchOne(query: Query, index: IndexCoordinates): SearchHit<T>? =
searchOne(query, T::class.java, index)
inline fun <reified T : Any> SearchOperations.multiSearch(queries: List<out Query>, index: IndexCoordinates): List<SearchHits<T>> =
multiSearch(queries, T::class.java, index)
inline fun <reified T : Any> SearchOperations.multiSearch(queries: List<out Query>): List<SearchHits<T>> =
multiSearch(queries, T::class.java)
inline fun <reified T : Any> SearchOperations.multiSearch(
queries: List<out Query>,
index: IndexCoordinates
): List<SearchHits<T>> =
multiSearch(queries, T::class.java, index)
inline fun <reified T : Any> SearchOperations.search(query: Query): SearchHits<T> =
search(query, T::class.java)
search(query, T::class.java)
inline fun <reified T : Any> SearchOperations.search(query: Query, index: IndexCoordinates): SearchHits<T> =
search(query, T::class.java, index)
search(query, T::class.java, index)
inline fun <reified T : Any> SearchOperations.search(query: MoreLikeThisQuery): SearchHits<T> =
search(query, T::class.java)
search(query, T::class.java)
inline fun <reified T : Any> SearchOperations.search(query: MoreLikeThisQuery, index: IndexCoordinates): SearchHits<T> =
search(query, T::class.java, index)
search(query, T::class.java, index)
inline fun <reified T : Any> SearchOperations.searchForStream(query: Query): SearchHitsIterator<T> =
searchForStream(query, T::class.java)
searchForStream(query, T::class.java)
inline fun <reified T : Any> SearchOperations.searchForStream(query: Query, index: IndexCoordinates): SearchHitsIterator<T> =
searchForStream(query, T::class.java, index)
inline fun <reified T : Any> SearchOperations.searchForStream(
query: Query,
index: IndexCoordinates
): SearchHitsIterator<T> =
searchForStream(query, T::class.java, index)

View File

@ -249,6 +249,40 @@ public abstract class NestedObjectIntegrationTests {
public void shouldSearchBooksForPersonInitialLevelNestedType() {
// given
var foo = getPerson();
Car car = new Car();
car.setName("Saturn");
car.setModel("Imprezza");
Person bar = new Person();
bar.setId("2");
bar.setName("Bar");
bar.setCar(Collections.singletonList(car));
List<IndexQuery> indexQueries = new ArrayList<>();
IndexQuery indexQuery1 = new IndexQuery();
indexQuery1.setId(foo.getId());
indexQuery1.setObject(foo);
IndexQuery indexQuery2 = new IndexQuery();
indexQuery2.setId(bar.getId());
indexQuery2.setObject(bar);
indexQueries.add(indexQuery1);
indexQueries.add(indexQuery2);
operations.bulkIndex(indexQueries, Person.class);
// when
Query searchQuery = getNestedQuery3();
SearchHits<Person> persons = operations.search(searchQuery, Person.class);
// then
assertThat(persons).hasSize(1);
}
private static Person getPerson() {
List<Car> cars = new ArrayList<>();
Car saturn = new Car();
@ -288,36 +322,7 @@ public abstract class NestedObjectIntegrationTests {
foo.setId("1");
foo.setCar(cars);
foo.setBooks(Arrays.asList(java, spring));
Car car = new Car();
car.setName("Saturn");
car.setModel("Imprezza");
Person bar = new Person();
bar.setId("2");
bar.setName("Bar");
bar.setCar(Collections.singletonList(car));
List<IndexQuery> indexQueries = new ArrayList<>();
IndexQuery indexQuery1 = new IndexQuery();
indexQuery1.setId(foo.getId());
indexQuery1.setObject(foo);
IndexQuery indexQuery2 = new IndexQuery();
indexQuery2.setId(bar.getId());
indexQuery2.setObject(bar);
indexQueries.add(indexQuery1);
indexQueries.add(indexQuery2);
operations.bulkIndex(indexQueries, Person.class);
// when
Query searchQuery = getNestedQuery3();
SearchHits<Person> persons = operations.search(searchQuery, Person.class);
// then
assertThat(persons).hasSize(1);
return foo;
}
@NotNull

View File

@ -46,9 +46,9 @@ import org.springframework.lang.Nullable;
*/
public class ComposableAnnotationsUnitTest {
private static SimpleElasticsearchMappingContext mappingContext = new SimpleElasticsearchMappingContext();
private static MappingElasticsearchConverter converter = new MappingElasticsearchConverter(mappingContext);
private static MappingBuilder mappingBuilder = new MappingBuilder(converter);
private static final SimpleElasticsearchMappingContext mappingContext = new SimpleElasticsearchMappingContext();
private static final MappingElasticsearchConverter converter = new MappingElasticsearchConverter(mappingContext);
private static final MappingBuilder mappingBuilder = new MappingBuilder(converter);
@Test // DATAES-362
@DisplayName("Document annotation should be composable")

View File

@ -19,11 +19,16 @@ import reactor.blockhound.BlockHound;
import reactor.blockhound.BlockingOperationError;
import reactor.blockhound.integration.BlockHoundIntegration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* @author Peter-Josef Meisch
*/
public class BlockHoundIntegrationCustomizer implements BlockHoundIntegration {
private static final Log LOGGER = LogFactory.getLog(BlockHoundIntegrationCustomizer.class);
@Override
public void applyTo(BlockHound.Builder builder) {
// Elasticsearch classes reading from the classpath on initialization, needed for parsing Elasticsearch responses
@ -35,7 +40,7 @@ public class BlockHoundIntegrationCustomizer implements BlockHoundIntegration {
.allowBlockingCallsInside("jakarta.json.spi.JsonProvider", "provider") //
;
builder.blockingMethodCallback(it -> {
new Error(it.toString()).printStackTrace();
LOGGER.error("BlockHound error", new Error(it.toString()));
throw new BlockingOperationError(it);
});

View File

@ -50,14 +50,13 @@ import com.github.tomakehurst.wiremock.stubbing.StubMapping;
/**
* We need hoverfly for testing the reactive code to use a proxy. Wiremock cannot intercept the proxy calls as WebClient
* uses HTTP CONNECT on proxy requests which wiremock does not support.
* <br/>
* Note: since 5.0 we do not use the WebClient for
* the reactive code anymore, so this might be handled with two wiremocks, but there is no real need to change this test
* setup.
* uses HTTP CONNECT on proxy requests which wiremock does not support. <br/>
* Note: since 5.0 we do not use the WebClient for the reactive code anymore, so this might be handled with two
* wiremocks, but there is no real need to change this test setup.
*
* @author Peter-Josef Meisch
*/
@SuppressWarnings("UastIncorrectHttpHeaderInspection")
@HoverflyCapture(path = "target/hoverfly", config = @HoverflyConfig(proxyLocalHost = true, plainHttpTunneling = true))
@ExtendWith(HoverflyExtension.class)
public class RestClientsTest {
@ -208,15 +207,14 @@ public class RestClientsTest {
ClientUnderTest clientUnderTest = clientUnderTestFactory.create(clientConfiguration);
class Foo {
public String id;
public final String id;
Foo(String id) {
this.id = id;
}
}
;
clientUnderTest.index(new Foo("42"));
clientUnderTest.index(new Foo("42"));
verify(putRequestedFor(urlMatching(urlPattern)) //
.withHeader("Accept", new EqualToPattern("application/vnd.elasticsearch+json;compatible-with=7")) //

View File

@ -60,7 +60,7 @@ import org.springframework.lang.Nullable;
*/
public class CriteriaQueryMappingUnitTests {
private JsonpMapper mapper = new JacksonJsonpMapper();
private final JsonpMapper mapper = new JacksonJsonpMapper();
MappingElasticsearchConverter mappingElasticsearchConverter;

View File

@ -33,7 +33,7 @@ import org.springframework.data.elasticsearch.core.query.Criteria;
@SuppressWarnings("ConstantConditions")
class CriteriaQueryProcessorUnitTests {
private JsonpMapper mapper = new JacksonJsonpMapper();
private final JsonpMapper mapper = new JacksonJsonpMapper();
private final CriteriaQueryProcessor queryProcessor = new CriteriaQueryProcessor();
@ -99,7 +99,7 @@ class CriteriaQueryProcessorUnitTests {
]
}
}
""";
""";
Criteria criteria = new Criteria("field1").is("value1").or("field2").is("value2");
@ -152,7 +152,7 @@ class CriteriaQueryProcessorUnitTests {
]
}
}
""";
""";
Criteria criteria = new Criteria("field1").is("value1") //
.or("field2").is("value2") //
@ -295,7 +295,7 @@ class CriteriaQueryProcessorUnitTests {
]
}
}
""";
""";
Criteria criteria = Criteria.or()
.subCriteria(new Criteria("lastName").is("Miller")

View File

@ -67,7 +67,7 @@ import org.springframework.lang.Nullable;
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
public class DevTests {
private static Log LOGGER = LogFactory.getLog(DevTests.class);
private static final Log LOGGER = LogFactory.getLog(DevTests.class);
private static final String INDEX = "appdata-index";
@ -218,13 +218,13 @@ public class DevTests {
try {
HealthResponse healthResponse = clusterHealthImperative(healthRequest);
} catch (IOException e) {
e.printStackTrace();
LOGGER.error("error", e);
}
try {
HealthResponse healthResponse = clusterHealthReactive(healthRequest);
} catch (IOException e) {
e.printStackTrace();
LOGGER.error("error", e);
}
}
@ -301,12 +301,12 @@ public class DevTests {
try {
indexImperative(indexRequestBuilder.apply(1));
} catch (IOException e) {
e.printStackTrace();
LOGGER.error("error", e);
}
try {
indexReactive(indexRequestBuilder.apply(2));
} catch (IOException e) {
e.printStackTrace();
LOGGER.error("error", e);
}
}
@ -334,14 +334,14 @@ public class DevTests {
searchResponse = searchImperative(searchRequest);
assertThat(searchResponse).isNotNull();
} catch (IOException e) {
e.printStackTrace();
LOGGER.error("error", e);
}
try {
searchResponse = searchReactive(searchRequest);
assertThat(searchResponse).isNotNull();
} catch (Exception e) {
e.printStackTrace();
LOGGER.error("error", e);
}
}

View File

@ -38,6 +38,7 @@ import com.github.tomakehurst.wiremock.junit5.WireMockExtension;
* Tests that need to check the data produced by the Elasticsearch client
* @author Peter-Josef Meisch
*/
@SuppressWarnings("UastIncorrectHttpHeaderInspection")
@ExtendWith(SpringExtension.class)
public class ELCWiremockTests {

View File

@ -19,6 +19,8 @@ import static org.assertj.core.api.Assertions.*;
import co.elastic.clients.json.jackson.JacksonJsonpMapper;
import java.util.List;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.springframework.data.annotation.Id;
@ -29,12 +31,9 @@ import org.springframework.data.elasticsearch.core.convert.MappingElasticsearchC
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.mapping.SimpleElasticsearchMappingContext;
import org.springframework.data.elasticsearch.core.query.DocValueField;
import org.springframework.data.elasticsearch.core.query.Query;
import org.springframework.data.elasticsearch.core.query.StringQuery;
import org.springframework.lang.Nullable;
import java.util.List;
/**
* @author Peter-Josef Meisch
*/
@ -42,16 +41,16 @@ class RequestConverterTest {
private static final SimpleElasticsearchMappingContext mappingContext = new SimpleElasticsearchMappingContext();
private static final MappingElasticsearchConverter converter = new MappingElasticsearchConverter(mappingContext);
private JacksonJsonpMapper jsonpMapper = new JacksonJsonpMapper();
private RequestConverter requestConverter = new RequestConverter(converter, jsonpMapper);
private final JacksonJsonpMapper jsonpMapper = new JacksonJsonpMapper();
private final RequestConverter requestConverter = new RequestConverter(converter, jsonpMapper);
@Test // #2316
@DisplayName("should add docvalue_fields")
void shouldAddDocvalueFields() {
var docValueFields = List.of( //
new DocValueField("field1"), //
new DocValueField("field2", "format2") //
new DocValueField("field1"), //
new DocValueField("field2", "format2") //
);
// doesn't matter what type of query is used, the relevant part for docvalue_fields is in the base builder.
var query = StringQuery.builder("""
@ -59,10 +58,11 @@ class RequestConverterTest {
"match_all":{}
}
""") //
.withDocValueFields(docValueFields) //
.build();
.withDocValueFields(docValueFields) //
.build();
var searchRequest = requestConverter.searchRequest(query,null, SampleEntity.class, IndexCoordinates.of("foo"), true);
var searchRequest = requestConverter.searchRequest(query, null, SampleEntity.class, IndexCoordinates.of("foo"),
true);
var fieldAndFormats = searchRequest.docvalueFields();
assertThat(fieldAndFormats).hasSize(2);

View File

@ -15,6 +15,8 @@
*/
package org.springframework.data.elasticsearch.client.elc;
import static org.assertj.core.api.Assertions.*;
import co.elastic.clients.elasticsearch._types.ShardFailure;
import co.elastic.clients.elasticsearch._types.ShardStatistics;
import co.elastic.clients.elasticsearch.core.search.HitsMetadata;
@ -37,8 +39,6 @@ import org.springframework.data.elasticsearch.core.document.SearchDocumentRespon
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import static org.assertj.core.api.Assertions.*;
/**
* Tests for the factory class to create {@link SearchDocumentResponse} instances.
*
@ -48,7 +48,7 @@ import static org.assertj.core.api.Assertions.*;
*/
class SearchDocumentResponseBuilderUnitTests {
private JacksonJsonpMapper jsonpMapper = new JacksonJsonpMapper();
private final JacksonJsonpMapper jsonpMapper = new JacksonJsonpMapper();
@Test // #2681
void shouldGetPhraseSuggestion() throws JSONException {

View File

@ -37,6 +37,7 @@ import org.springframework.test.context.junit.jupiter.SpringExtension;
* @author Peter-Josef Meisch
* @since 4.4
*/
@SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection")
@ExtendWith(SpringExtension.class)
@ContextConfiguration
public class ReactiveElasticsearchConfigurationELCTests {
@ -59,7 +60,6 @@ public class ReactiveElasticsearchConfigurationELCTests {
* using a repository with an entity that is set to createIndex = false as we have no elastic running for this test
* and just check that all the necessary beans are created.
*/
// @Autowired private WebClient webClient;
@Autowired private ReactiveElasticsearchClient reactiveElasticsearchClient;
@Autowired private ReactiveElasticsearchOperations reactiveElasticsearchOperations;
@Autowired private CreateIndexFalseRepository repository;

View File

@ -18,8 +18,6 @@ package org.springframework.data.elasticsearch.config.notnested;
import static org.assertj.core.api.Assertions.*;
import static org.springframework.data.elasticsearch.annotations.FieldType.*;
import java.lang.Double;
import java.lang.Long;
import java.util.UUID;
import org.junit.jupiter.api.BeforeEach;
@ -51,6 +49,7 @@ import org.springframework.lang.Nullable;
* @author Gad Akuka
* @author Peter-Josef Meisch
*/
@SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection")
@SpringIntegrationTest
public abstract class EnableRepositoriesIntegrationTests implements ApplicationContextAware {

View File

@ -612,7 +612,8 @@ public abstract class ElasticsearchIntegrationTests {
// when
Query query = getTermQuery("id", documentId);
operations.delete(query, SampleEntity.class, IndexCoordinates.of(indexNameProvider.indexName()));
operations.delete(DeleteQuery.builder(query).build(), SampleEntity.class,
IndexCoordinates.of(indexNameProvider.indexName()));
// then
Query searchQuery = getTermQuery("id", documentId);
@ -643,7 +644,7 @@ public abstract class ElasticsearchIntegrationTests {
// when
Query query = getTermQuery("message", "foo");
operations.delete(query, SampleEntity.class, IndexCoordinates.of(MULTI_INDEX_ALL));
operations.delete(DeleteQuery.builder(query).build(), SampleEntity.class, IndexCoordinates.of(MULTI_INDEX_ALL));
// then
assertThat(operations.count(query, IndexCoordinates.of(MULTI_INDEX_1_NAME, MULTI_INDEX_2_NAME))).isEqualTo(0);
@ -674,7 +675,7 @@ public abstract class ElasticsearchIntegrationTests {
// when
Query query = getTermQuery("message", "negative");
operations.delete(query, SampleEntity.class, IndexCoordinates.of("test-index-*"));
operations.delete(DeleteQuery.builder(query).build(), SampleEntity.class, IndexCoordinates.of("test-index-*"));
operations.indexOps(IndexCoordinates.of(MULTI_INDEX_1_NAME)).refresh();
operations.indexOps(IndexCoordinates.of(MULTI_INDEX_2_NAME)).refresh();
@ -1036,7 +1037,8 @@ public abstract class ElasticsearchIntegrationTests {
CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria("message").contains("test"));
// when
operations.delete(criteriaQuery, SampleEntity.class, IndexCoordinates.of(indexNameProvider.indexName()));
operations.delete(DeleteQuery.builder(criteriaQuery).build(), SampleEntity.class,
IndexCoordinates.of(indexNameProvider.indexName()));
// then
StringQuery stringQuery = new StringQuery(MATCH_ALL);
@ -2494,7 +2496,8 @@ public abstract class ElasticsearchIntegrationTests {
// when
Query query = operations.idsQuery(Arrays.asList(documentIdToDelete));
operations.delete(query, SampleEntity.class, IndexCoordinates.of(indexNameProvider.indexName()));
operations.delete(DeleteQuery.builder(query).build(), SampleEntity.class,
IndexCoordinates.of(indexNameProvider.indexName()));
// then
// document with id "remainingDocumentId" should still be indexed
@ -2524,7 +2527,8 @@ public abstract class ElasticsearchIntegrationTests {
// when
CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria("id").is(documentIdToDelete));
operations.delete(criteriaQuery, SampleEntity.class, IndexCoordinates.of(indexNameProvider.indexName()));
operations.delete(DeleteQuery.builder(criteriaQuery).build(), SampleEntity.class,
IndexCoordinates.of(indexNameProvider.indexName()));
// then
// document with id "remainingDocumentId" should still be indexed
@ -2861,17 +2865,29 @@ public abstract class ElasticsearchIntegrationTests {
}
@Document(indexName = MULTI_INDEX_2_NAME)
class ResultAggregator {
static class ResultAggregator {
private String id;
private String firstName;
private String lastName;
private final String id;
private final String firstName;
private final String lastName;
ResultAggregator(String id, String firstName, String lastName) {
this.id = id;
this.firstName = firstName;
this.lastName = lastName;
}
public String getId() {
return id;
}
public String getFirstName() {
return firstName;
}
public String getLastName() {
return lastName;
}
}
@Test // DATAES-709
@ -3457,7 +3473,7 @@ public abstract class ElasticsearchIntegrationTests {
private void shouldDeleteEntityWithJoinFields(String qId2, String aId2) throws Exception {
operations.delete(getQueryForParentId("answer", qId2, qId2), SampleJoinEntity.class,
operations.delete(DeleteQuery.builder(getQueryForParentId("answer", qId2, qId2)).build(), SampleJoinEntity.class,
IndexCoordinates.of(indexNameProvider.indexName()));
SearchHits<SampleJoinEntity> deletedHits = operations.search(getQueryForParentId("answer", qId2, null),

View File

@ -291,7 +291,7 @@ class EntityOperationsUnitTests {
}
@Document(indexName = "entity-operations-test")
static record EntityFromRecord(
record EntityFromRecord(
@Id @Nullable String id,
@Field(type = FieldType.Text) @Nullable String text,
@Version @Nullable Long version,

View File

@ -25,10 +25,6 @@ import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
import java.lang.Boolean;
import java.lang.Integer;
import java.lang.Long;
import java.lang.Object;
import java.time.Duration;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
@ -608,7 +604,7 @@ public abstract class ReactiveElasticsearchIntegrationTests {
CriteriaQuery query = new CriteriaQuery(new Criteria("message").contains("test"));
operations.delete(query, SampleEntity.class) //
operations.delete(DeleteQuery.builder(query).build(), SampleEntity.class) //
.as(StepVerifier::create) //
.consumeNextWith(byQueryResponse -> assertThat(byQueryResponse.getDeleted()).isEqualTo(0L)).verifyComplete();
}
@ -631,7 +627,7 @@ public abstract class ReactiveElasticsearchIntegrationTests {
Query query = getBuilderWithTermQuery("message", "test").build();
operations.delete(query, SampleEntity.class, IndexCoordinates.of(indexPrefix + '*')) //
operations.delete(DeleteQuery.builder(query).build(), SampleEntity.class, IndexCoordinates.of(indexPrefix + '*')) //
.map(ByQueryResponse::getDeleted) //
.as(StepVerifier::create) //
.expectNext(2L) //
@ -658,7 +654,7 @@ public abstract class ReactiveElasticsearchIntegrationTests {
Query query = getBuilderWithTermQuery("message", "negative").build();
operations.delete(query, SampleEntity.class, IndexCoordinates.of(indexPrefix + '*')) //
operations.delete(DeleteQuery.builder(query).build(), SampleEntity.class, IndexCoordinates.of(indexPrefix + '*')) //
.map(ByQueryResponse::getDeleted) //
.as(StepVerifier::create) //
.expectNext(0L) //
@ -674,7 +670,7 @@ public abstract class ReactiveElasticsearchIntegrationTests {
CriteriaQuery query = new CriteriaQuery(new Criteria("message").contains("test"));
operations.delete(query, SampleEntity.class) //
operations.delete(DeleteQuery.builder(query).build(), SampleEntity.class) //
.map(ByQueryResponse::getDeleted) //
.as(StepVerifier::create) //
.expectNext(2L) //
@ -688,7 +684,7 @@ public abstract class ReactiveElasticsearchIntegrationTests {
CriteriaQuery query = new CriteriaQuery(new Criteria("message").contains("luke"));
operations.delete(query, SampleEntity.class) //
operations.delete(DeleteQuery.builder(query).build(), SampleEntity.class) //
.map(ByQueryResponse::getDeleted) //
.as(StepVerifier::create) //
.expectNext(0L) //
@ -1114,6 +1110,7 @@ public abstract class ReactiveElasticsearchIntegrationTests {
try {
JSONAssert.assertEquals(expectedMappings, indexInformation.getMapping().toJson(), false);
} catch (JSONException e) {
// noinspection CallToPrintStackTrace
e.printStackTrace();
}
}).verifyComplete();

View File

@ -67,7 +67,7 @@ public abstract class ReactiveSearchTemplateIntegrationTests {
"size": 100
}
""";
private Script script = Script.builder() //
private final Script script = Script.builder() //
.withId("testScript") //
.withLanguage("mustache") //
.withSource(SCRIPT) //

View File

@ -26,8 +26,8 @@ import org.springframework.data.elasticsearch.core.convert.ElasticsearchCustomCo
*/
class ElasticsearchCustomConversionsUnitTests {
private byte[] bytes = new byte[] { 0x01, 0x02, 0x03, 0x04 };
private String base64 = "AQIDBA==";
private final byte[] bytes = new byte[] { 0x01, 0x02, 0x03, 0x04 };
private final String base64 = "AQIDBA==";
@Test
void shouldConvertFromByteArrayToBase64() {

View File

@ -27,7 +27,7 @@ import org.springframework.data.elasticsearch.annotations.DateFormat;
*/
class ElasticsearchDateConverterUnitTests {
private ZonedDateTime zdt = ZonedDateTime.now(ZoneId.of("Europe/Berlin"));
private final ZonedDateTime zdt = ZonedDateTime.now(ZoneId.of("Europe/Berlin"));
@ParameterizedTest // DATAES-716
@EnumSource(DateFormat.class)

View File

@ -1707,7 +1707,6 @@ public class MappingElasticsearchConverterUnitTests {
var entity = mappingElasticsearchConverter.read(EntityWithCollections.class, source);
assertThat(entity.getChildrenList()).hasSize(1);
// noinspection ConstantConditions
assertThat(entity.getChildrenList().get(0).getName()).isEqualTo("child");
}
@ -1727,13 +1726,12 @@ public class MappingElasticsearchConverterUnitTests {
}
]
}
""";
""";
Document source = Document.parse(json);
var entity = mappingElasticsearchConverter.read(EntityWithCollections.class, source);
assertThat(entity.getChildrenList()).hasSize(2);
// noinspection ConstantConditions
assertThat(entity.getChildrenList().get(0).getName()).isEqualTo("child1");
assertThat(entity.getChildrenList().get(1).getName()).isEqualTo("child2");
}
@ -1755,7 +1753,6 @@ public class MappingElasticsearchConverterUnitTests {
var entity = mappingElasticsearchConverter.read(EntityWithCollections.class, source);
assertThat(entity.getChildrenSet()).hasSize(1);
// noinspection ConstantConditions
assertThat(entity.getChildrenSet().iterator().next().getName()).isEqualTo("child");
}
@ -1781,7 +1778,6 @@ public class MappingElasticsearchConverterUnitTests {
var entity = mappingElasticsearchConverter.read(EntityWithCollections.class, source);
assertThat(entity.getChildrenSet()).hasSize(2);
// noinspection ConstantConditions
List<String> names = entity.getChildrenSet().stream().map(EntityWithCollections.Child::getName)
.collect(Collectors.toList());
assertThat(names).containsExactlyInAnyOrder("child1", "child2");
@ -1872,7 +1868,6 @@ public class MappingElasticsearchConverterUnitTests {
var entity = mappingElasticsearchConverter.read(ImmutableEntityWithCollections.class, source);
assertThat(entity.getChildrenList()).hasSize(1);
// noinspection ConstantConditions
assertThat(entity.getChildrenList().get(0).getName()).isEqualTo("child");
}
@ -1892,13 +1887,12 @@ public class MappingElasticsearchConverterUnitTests {
}
]
}
""";
""";
Document source = Document.parse(json);
var entity = mappingElasticsearchConverter.read(ImmutableEntityWithCollections.class, source);
assertThat(entity.getChildrenList()).hasSize(2);
// noinspection ConstantConditions
assertThat(entity.getChildrenList().get(0).getName()).isEqualTo("child1");
assertThat(entity.getChildrenList().get(1).getName()).isEqualTo("child2");
}
@ -1920,7 +1914,6 @@ public class MappingElasticsearchConverterUnitTests {
var entity = mappingElasticsearchConverter.read(ImmutableEntityWithCollections.class, source);
assertThat(entity.getChildrenSet()).hasSize(1);
// noinspection ConstantConditions
assertThat(entity.getChildrenSet().iterator().next().getName()).isEqualTo("child");
}
@ -1946,7 +1939,6 @@ public class MappingElasticsearchConverterUnitTests {
var entity = mappingElasticsearchConverter.read(ImmutableEntityWithCollections.class, source);
assertThat(entity.getChildrenSet()).hasSize(2);
// noinspection ConstantConditions
List<String> names = entity.getChildrenSet().stream().map(ImmutableEntityWithCollections.Child::getName)
.collect(Collectors.toList());
assertThat(names).containsExactlyInAnyOrder("child1", "child2");
@ -2338,7 +2330,7 @@ public class MappingElasticsearchConverterUnitTests {
MAN("1"), MACHINE("0");
String theValue;
final String theValue;
Gender(String theValue) {
this.theValue = theValue;
@ -2973,16 +2965,16 @@ public class MappingElasticsearchConverterUnitTests {
private static final class ImmutableEntityWithCollections {
@Field(type = FieldType.Keyword)
@Nullable private List<String> stringList;
@Nullable private final List<String> stringList;
@Field(type = FieldType.Keyword)
@Nullable private Set<String> stringSet;
@Nullable private final Set<String> stringSet;
@Field(type = FieldType.Object)
@Nullable private List<Child> childrenList;
@Nullable private final List<Child> childrenList;
@Field(type = FieldType.Object)
@Nullable private Set<Child> childrenSet;
@Nullable private final Set<Child> childrenSet;
public ImmutableEntityWithCollections(@Nullable List<String> stringList, @Nullable Set<String> stringSet,
@Nullable List<Child> childrenList, @Nullable Set<Child> childrenSet) {
@ -3015,7 +3007,7 @@ public class MappingElasticsearchConverterUnitTests {
public static class Child {
@Field(type = FieldType.Keyword)
@Nullable private String name;
@Nullable private final String name;
public Child(@Nullable String name) {
this.name = name;
@ -3254,6 +3246,6 @@ public class MappingElasticsearchConverterUnitTests {
Assert.notNull(o, "o must not be null");
return new StringBuilder().append(o.toString()).reverse().toString();
return new StringBuilder().append(o).reverse().toString();
}
}

View File

@ -23,12 +23,10 @@ import java.util.List;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledIf;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.GeoPointField;
import org.springframework.data.elasticsearch.core.AbstractElasticsearchTemplate;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.core.SearchHit;
import org.springframework.data.elasticsearch.core.SearchHits;
@ -391,7 +389,7 @@ public abstract class GeoIntegrationTests {
static class AuthorMarkerEntityBuilder {
private AuthorMarkerEntity result;
private final AuthorMarkerEntity result;
public AuthorMarkerEntityBuilder(String id) {
result = new AuthorMarkerEntity(id);

View File

@ -151,7 +151,7 @@ public abstract class IndexTemplateIntegrationTests {
void shouldDeleteComponentTemplate() {
IndexOperations indexOps = operations.indexOps(IndexCoordinates.of("dont-care"));
String templateName = "template" + UUID.randomUUID().toString();
String templateName = "template" + UUID.randomUUID();
var putComponentTemplateRequest = PutComponentTemplateRequest.builder() //
.withName(templateName) //
.withTemplateData(ComponentTemplateRequestData.builder() //
@ -293,7 +293,7 @@ public abstract class IndexTemplateIntegrationTests {
@Test // DATAES-612
void shouldReturnNullOnNonExistingGetTemplate() {
String templateName = "template" + UUID.randomUUID().toString();
String templateName = "template" + UUID.randomUUID();
IndexOperations indexOps = operations.indexOps(IndexCoordinates.of("dont-care"));
GetTemplateRequest getTemplateRequest = new GetTemplateRequest(templateName);
@ -349,7 +349,7 @@ public abstract class IndexTemplateIntegrationTests {
void shouldCheckExists() {
IndexOperations indexOps = operations.indexOps(IndexCoordinates.of("dont-care"));
String templateName = "template" + UUID.randomUUID().toString();
String templateName = "template" + UUID.randomUUID();
ExistsTemplateRequest existsTemplateRequest = new ExistsTemplateRequest(templateName);
boolean exists = indexOps.existsTemplate(existsTemplateRequest);
@ -372,7 +372,7 @@ public abstract class IndexTemplateIntegrationTests {
IndexOperations indexOps = operations.indexOps(IndexCoordinates.of("dont-care"));
String templateName = "template" + UUID.randomUUID().toString();
String templateName = "template" + UUID.randomUUID();
ExistsTemplateRequest existsTemplateRequest = new ExistsTemplateRequest(templateName);
PutTemplateRequest putTemplateRequest = PutTemplateRequest.builder(templateName, "log-*") //

View File

@ -516,7 +516,7 @@ public abstract class MappingBuilderIntegrationTests extends MappingContextBaseT
@Nullable
@Id private String id;
@Field(type = FieldType.Nested, ignoreFields = { "users" }) private Set<Group> groups = new HashSet<>();
@Field(type = FieldType.Nested, ignoreFields = { "users" }) private final Set<Group> groups = new HashSet<>();
}
@Document(indexName = "#{@indexNameProvider.indexName()}")
@ -525,7 +525,7 @@ public abstract class MappingBuilderIntegrationTests extends MappingContextBaseT
@Nullable
@Id String id;
@Field(type = FieldType.Nested, ignoreFields = { "groups" }) private Set<User> users = new HashSet<>();
@Field(type = FieldType.Nested, ignoreFields = { "groups" }) private final Set<User> users = new HashSet<>();
}
@Document(indexName = "#{@indexNameProvider.indexName()}")

View File

@ -79,7 +79,7 @@ public class MappingBuilderUnitTests extends MappingContextBaseTests {
}
}
}
""";
""";
String mapping = getMappingBuilder().buildPropertyMapping(SampleTransientEntity.class);
@ -122,7 +122,7 @@ public class MappingBuilderUnitTests extends MappingContextBaseTests {
}
}
}
""";
""";
String mapping = getMappingBuilder().buildPropertyMapping(SampleInheritedEntity.class);
@ -257,7 +257,7 @@ public class MappingBuilderUnitTests extends MappingContextBaseTests {
}
}
}
""";
""";
String mapping = getMappingBuilder().buildPropertyMapping(FieldNameEntity.MappingEntity.class);

View File

@ -23,7 +23,7 @@ import org.springframework.lang.Nullable;
*/
public class MappingParametersTest extends MappingContextBaseTests {
private ElasticsearchPersistentEntity<?> entity = elasticsearchConverter.get().getMappingContext()
private final ElasticsearchPersistentEntity<?> entity = elasticsearchConverter.get().getMappingContext()
.getRequiredPersistentEntity(AnnotatedClass.class);
@Test // DATAES-621

View File

@ -177,7 +177,7 @@ public abstract class ReactiveIndexTemplateIntegrationTests {
void shouldDeleteComponentTemplate() {
var blockingIndexOps = blocking(operations.indexOps(IndexCoordinates.of("dont-care")));
String templateName = "template" + UUID.randomUUID().toString();
String templateName = "template" + UUID.randomUUID();
var putComponentTemplateRequest = PutComponentTemplateRequest.builder() //
.withName(templateName) //
.withTemplateData(ComponentTemplateRequestData.builder() //
@ -321,7 +321,7 @@ public abstract class ReactiveIndexTemplateIntegrationTests {
@Test // DATAES-612
void shouldReturnNullOnNonExistingGetTemplate() {
String templateName = "template" + UUID.randomUUID().toString();
String templateName = "template" + UUID.randomUUID();
GetTemplateRequest getTemplateRequest = new GetTemplateRequest(templateName);
indexOperations.getTemplate(getTemplateRequest) //
@ -373,7 +373,7 @@ public abstract class ReactiveIndexTemplateIntegrationTests {
@Test // DATAES-612
void shouldCheckTemplateExists() {
String templateName = "template" + UUID.randomUUID().toString();
String templateName = "template" + UUID.randomUUID();
ExistsTemplateRequest existsTemplateRequest = new ExistsTemplateRequest(templateName);
boolean exists = blockingIndexOperations.existsTemplate(existsTemplateRequest);
@ -394,7 +394,7 @@ public abstract class ReactiveIndexTemplateIntegrationTests {
@Test // DATAES-612
void shouldDeleteTemplate() {
String templateName = "template" + UUID.randomUUID().toString();
String templateName = "template" + UUID.randomUUID();
ExistsTemplateRequest existsTemplateRequest = new ExistsTemplateRequest(templateName);
PutTemplateRequest putTemplateRequest = PutTemplateRequest.builder(templateName, "log-*") //

View File

@ -76,7 +76,7 @@ public class SimpleDynamicTemplatesMappingTests extends MappingContextBaseTests
@Id private String id;
@Nullable
@Field(type = FieldType.Object) private Map<String, String> names = new HashMap<>();
@Field(type = FieldType.Object) private final Map<String, String> names = new HashMap<>();
}
/**
@ -91,7 +91,7 @@ public class SimpleDynamicTemplatesMappingTests extends MappingContextBaseTests
@Id private String id;
@Nullable
@Field(type = FieldType.Object) private Map<String, String> names = new HashMap<>();
@Field(type = FieldType.Object) private final Map<String, String> names = new HashMap<>();
}
}

View File

@ -87,7 +87,7 @@ public abstract class ReactiveSearchAfterIntegrationTests {
query.setSearchAfter(searchAfter);
List<SearchHit<Entity>> searchHits = operations.search(query, Entity.class).collectList().block();
if (searchHits.size() == 0) {
if (searchHits.isEmpty()) {
break;
}
foundEntities.addAll(searchHits.stream().map(SearchHit::getContent).collect(Collectors.toList()));

View File

@ -23,7 +23,6 @@ import java.util.Collection;
import java.util.List;
import org.json.JSONException;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.annotation.Id;
@ -47,6 +46,7 @@ import org.springframework.lang.Nullable;
* @author Peter-Josef Meisch
* @author Haibo Liu
*/
@SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection")
@SpringIntegrationTest
public abstract class ElasticsearchPartQueryIntegrationTests {
@ -55,9 +55,6 @@ public abstract class ElasticsearchPartQueryIntegrationTests {
@Autowired protected ElasticsearchOperations operations;
@BeforeEach
public void setUp() {}
@Test
void findByName() throws NoSuchMethodException, JSONException {
String methodName = "findByName";

View File

@ -123,7 +123,7 @@ public abstract class NativeQueryIntegrationTests {
]
}
}
""").build();
""").build();
var nativeQuery = NativeQuery.builder().withQuery(stringQuery).build();
var searchHits = operations.search(nativeQuery, SampleEntity.class);

View File

@ -395,7 +395,7 @@ public abstract class ReactiveScriptedAndRuntimeFieldsIntegrationTests {
}
}
}
""")
""")
Flux<SearchHit<SAREntity>> findWithScriptedFields(Integer value,
org.springframework.data.elasticsearch.core.query.ScriptedField scriptedField1,
org.springframework.data.elasticsearch.core.query.ScriptedField scriptedField2);
@ -410,7 +410,7 @@ public abstract class ReactiveScriptedAndRuntimeFieldsIntegrationTests {
}
}
}
""")
""")
Flux<SearchHit<SAREntity>> findWithRuntimeFields(Integer value, RuntimeField runtimeField1,
RuntimeField runtimeField2);
}

View File

@ -39,9 +39,8 @@ import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;
class DefaultRoutingResolverUnitTest {
@Autowired private ApplicationContext applicationContext;
private SimpleElasticsearchMappingContext mappingContext;
@Nullable private RoutingResolver routingResolver;
@Nullable private RoutingResolver routingResolver;
@Configuration
static class Config {
@ -53,7 +52,7 @@ class DefaultRoutingResolverUnitTest {
@BeforeEach
void setUp() {
mappingContext = new SimpleElasticsearchMappingContext();
SimpleElasticsearchMappingContext mappingContext = new SimpleElasticsearchMappingContext();
mappingContext.setApplicationContext(applicationContext);
routingResolver = new DefaultRoutingResolver(mappingContext);

View File

@ -269,7 +269,7 @@ public abstract class CompletionIntegrationTests {
static class CompletionEntityBuilder {
private CompletionEntity result;
private final CompletionEntity result;
public CompletionEntityBuilder(String id) {
result = new CompletionEntity(id);
@ -354,7 +354,7 @@ public abstract class CompletionIntegrationTests {
*/
static class AnnotatedCompletionEntityBuilder {
private AnnotatedCompletionEntity result;
private final AnnotatedCompletionEntity result;
public AnnotatedCompletionEntityBuilder(String id) {
result = new AnnotatedCompletionEntity(id);

View File

@ -15,9 +15,13 @@
*/
package org.springframework.data.elasticsearch.core.suggest;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.*;
import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Order;
@ -243,7 +247,7 @@ public abstract class CompletionWithContextsIntegrationTests {
*/
static class ContextCompletionEntityBuilder {
private ContextCompletionEntity result;
private final ContextCompletionEntity result;
public ContextCompletionEntityBuilder(String id) {
result = new ContextCompletionEntity(id);

View File

@ -159,7 +159,7 @@ public abstract class ReactiveSuggestIntegrationTests {
static class CompletionEntityBuilder {
private CompletionEntity result;
private final CompletionEntity result;
public CompletionEntityBuilder(String id) {
result = new CompletionEntity(id);

View File

@ -48,6 +48,7 @@ class ElasticsearchOperationsProducer {
return new ElasticsearchTemplate(elasticsearchClient);
}
@SuppressWarnings("EmptyMethod")
@PreDestroy
public void shutdown() {
// remove everything to avoid conflicts with other tests in case server not shut down properly

View File

@ -27,7 +27,7 @@ import org.springframework.data.elasticsearch.repositories.complex.custommethod.
*/
public class ComplexElasticsearchRepositoryManualWiringImpl implements ComplexElasticsearchRepositoryCustom {
private ElasticsearchOperations operations;
private final ElasticsearchOperations operations;
public ComplexElasticsearchRepositoryManualWiringImpl(ElasticsearchOperations operations) {
this.operations = operations;

View File

@ -61,20 +61,18 @@ public abstract class UUIDElasticsearchRepositoryIntegrationTests {
@Autowired private SampleUUIDKeyedElasticsearchRepository repository;
@Autowired ElasticsearchOperations operations;
private IndexOperations indexOperations;
@Autowired IndexNameProvider indexNameProvider;
@Autowired IndexNameProvider indexNameProvider;
@BeforeEach
public void before() {
indexNameProvider.increment();
indexOperations = operations.indexOps(SampleEntityUUIDKeyed.class);
indexOperations.createWithMapping();
operations.indexOps(SampleEntityUUIDKeyed.class).createWithMapping();
}
@Test
@org.junit.jupiter.api.Order(Integer.MAX_VALUE)
void cleanup() {
operations.indexOps(IndexCoordinates.of(indexNameProvider.getPrefix() + "*")).delete();
operations.indexOps(IndexCoordinates.of(indexNameProvider.getPrefix() + '*')).delete();
}
@Test

View File

@ -58,7 +58,7 @@ public class ReactiveElasticsearchRepositoriesRegistrarTests {
Assertions.assertThat(repository).isNotNull();
// there is an index to delete after this test
operations.indexOps(ReactiveElasticsearchRepositoryConfigurationExtensionUnitTests.SwCharacter.class).delete()
operations.indexOps(SampleEntity.class).delete()
.block();
}

View File

@ -26,7 +26,7 @@ import org.springframework.core.env.Environment;
import org.springframework.core.env.StandardEnvironment;
import org.springframework.core.io.ResourceLoader;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.type.StandardAnnotationMetadata;
import org.springframework.core.type.AnnotationMetadata;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.repository.ReactiveElasticsearchRepository;
import org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource;
@ -40,13 +40,13 @@ import org.springframework.data.repository.reactive.ReactiveCrudRepository;
*/
public class ReactiveElasticsearchRepositoryConfigurationExtensionUnitTests {
StandardAnnotationMetadata metadata = new StandardAnnotationMetadata(Config.class, true);
AnnotationMetadata metadata = AnnotationMetadata.introspect(Config.class);
ResourceLoader loader = new PathMatchingResourcePatternResolver();
Environment environment = new StandardEnvironment();
BeanDefinitionRegistry registry = new DefaultListableBeanFactory();
RepositoryConfigurationSource configurationSource = new AnnotationRepositoryConfigurationSource(metadata,
EnableReactiveElasticsearchRepositories.class, loader, environment, registry);
EnableReactiveElasticsearchRepositories.class, loader, environment, registry, null);
@Test // DATAES-519
public void isStrictMatchIfDomainTypeIsAnnotatedWithDocument() {

View File

@ -23,7 +23,6 @@ import java.util.List;
import java.util.stream.Collectors;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
@ -400,7 +399,6 @@ abstract class QueryKeywordsIntegrationTests {
}
}
@SuppressWarnings({ "SpringDataRepositoryMethodParametersInspection", "SpringDataMethodInconsistencyInspection" })
interface ProductRepository extends ElasticsearchRepository<Product, String> {
List<Product> findByName(@Nullable String name);

View File

@ -22,8 +22,6 @@ import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
import java.lang.Boolean;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Order;
@ -139,7 +137,6 @@ public abstract class ReactiveQueryKeywordsIntegrationTests {
.verifyComplete();
}
@SuppressWarnings("SpringDataMethodInconsistencyInspection")
interface SampleRepository extends ReactiveElasticsearchRepository<SampleEntity, String> {
Flux<SearchHit<SampleEntity>> findByMessageExists();

View File

@ -28,6 +28,7 @@ import org.junit.jupiter.api.Test;
* @author Peter-Josef Meisch
* @since 5.0
*/
@SuppressWarnings("UastIncorrectHttpHeaderInspection")
class HttpHeadersTest {
public static final String X_TEST_HEADER = "X-Test-Header";

View File

@ -24,7 +24,7 @@ public class IndexBuilder {
indexQuery.setObject(object);
return indexQuery;
} catch (IllegalAccessException e) {
e.printStackTrace();
throw new RuntimeException(e);
}
}
}

View File

@ -22,6 +22,7 @@ package org.springframework.data.elasticsearch.utils.geohash;
* <br/>
* Utilities for common Bit twiddling methods. Borrowed heavily from Lucene (org.apache.lucene.util.BitUtil).
*/
@SuppressWarnings("CStyleArrayDeclaration")
public class BitUtil { // magic numbers for bit interleaving
private static final long MAGIC[] = { 0x5555555555555555L, 0x3333333333333333L, 0x0F0F0F0F0F0F0F0FL,
0x00FF00FF00FF00FFL, 0x0000FFFF0000FFFFL, 0x00000000FFFFFFFFL, 0xAAAAAAAAAAAAAAAAL };
@ -65,7 +66,7 @@ public class BitUtil { // magic numbers for bit interleaving
/**
* flip flops odd with even bits
*/
public static final long flipFlop(final long b) {
public static long flipFlop(final long b) {
return ((b & MAGIC[6]) >>> 1) | ((b & MAGIC[0]) << 1);
}
}

View File

@ -130,7 +130,7 @@ public class Geohash {
* @param neighbors list to add the neighbors to
* @return the given list
*/
public static final <E extends Collection<? super String>> E addNeighbors(String geohash, E neighbors) {
public static <E extends Collection<? super String>> E addNeighbors(String geohash, E neighbors) {
return addNeighborsAtLevel(geohash, geohash.length(), neighbors);
}
@ -142,8 +142,8 @@ public class Geohash {
* @param neighbors list to add the neighbors to
* @return the given list
*/
public static final <E extends Collection<? super String>> E addNeighborsAtLevel(String geohash, int level,
E neighbors) {
public static <E extends Collection<? super String>> E addNeighborsAtLevel(String geohash, int level,
E neighbors) {
String south = getNeighbor(geohash, level, 0, -1);
String north = getNeighbor(geohash, level, 0, +1);
if (north != null) {
@ -173,7 +173,7 @@ public class Geohash {
* @param dy delta of the second grid coordinate (must be -1, 0 or +1)
* @return geohash of the defined cell
*/
public static final String getNeighbor(String geohash, int level, int dx, int dy) {
public static String getNeighbor(String geohash, int level, int dx, int dy) {
int cell = BASE_32_STRING.indexOf(geohash.charAt(level - 1));
// Decoding the Geohash bit pattern to determine grid coordinates
@ -221,14 +221,14 @@ public class Geohash {
/**
* Encode a string geohash to the geohash based long format (lon/lat interleaved, 4 least significant bits = level)
*/
public static final long longEncode(String hash) {
public static long longEncode(String hash) {
return longEncode(hash, hash.length());
}
/**
* Encode lon/lat to the geohash based long format (lon/lat interleaved, 4 least significant bits = level)
*/
public static final long longEncode(final double lon, final double lat, final int level) {
public static long longEncode(final double lon, final double lat, final int level) {
// shift to appropriate level
final short msf = (short) (((12 - level) * 5) + (MORTON_OFFSET - 2));
return ((encodeLatLon(lat, lon) >>> msf) << 4) | level;
@ -237,14 +237,14 @@ public class Geohash {
/**
* Encode to a geohash string from full resolution longitude, latitude)
*/
public static final String stringEncode(final double lon, final double lat) {
public static String stringEncode(final double lon, final double lat) {
return stringEncode(lon, lat, 12);
}
/**
* Encode to a level specific geohash string from full resolution longitude, latitude
*/
public static final String stringEncode(final double lon, final double lat, final int level) {
public static String stringEncode(final double lon, final double lat, final int level) {
// convert to geohashlong
long interleaved = encodeLatLon(lat, lon);
interleaved >>>= (((PRECISION - level) * 5) + (MORTON_OFFSET - 2));
@ -255,7 +255,7 @@ public class Geohash {
/**
* Encode to a geohash string from the geohash based long format
*/
public static final String stringEncode(long geoHashLong) {
public static String stringEncode(long geoHashLong) {
int level = (int) geoHashLong & 15;
geoHashLong >>>= 4;
char[] chars = new char[level];
@ -350,12 +350,12 @@ public class Geohash {
}
/** returns the latitude value from the string based geohash */
public static final double decodeLatitude(final String geohash) {
public static double decodeLatitude(final String geohash) {
return decodeLatitude(Geohash.mortonEncode(geohash));
}
/** returns the latitude value from the string based geohash */
public static final double decodeLongitude(final String geohash) {
public static double decodeLongitude(final String geohash) {
return decodeLongitude(Geohash.mortonEncode(geohash));
}

View File

@ -44,25 +44,7 @@ package org.springframework.data.elasticsearch.utils.geohash;
*/
public interface GeometryVisitor<T, E extends Exception> {
/*
T visit(Circle circle) throws E;
T visit(GeometryCollection<?> collection) throws E;
T visit(Line line) throws E;
T visit(LinearRing ring) throws E;
T visit(MultiLine multiLine) throws E;
T visit(MultiPoint multiPoint) throws E;
T visit(MultiPolygon multiPolygon) throws E;
*/
T visit(Point point) throws E;
/*
T visit(Polygon polygon) throws E;
*/
T visit(Rectangle rectangle) throws E;
}

View File

@ -36,11 +36,11 @@ public class WellKnownText {
public static final String COMMA = ",";
public static final String NAN = "NaN";
private final String NUMBER = "<NUMBER>";
private final String EOF = "END-OF-STREAM";
private final String EOL = "END-OF-LINE";
private static final String NUMBER = "<NUMBER>";
private static final String EOF = "END-OF-STREAM";
private static final String EOL = "END-OF-LINE";
private final boolean coerce;
@SuppressWarnings("FieldCanBeLocal") private final boolean coerce;
private final GeometryValidator validator;
public WellKnownText(boolean coerce, GeometryValidator validator) {
@ -76,7 +76,7 @@ public class WellKnownText {
private void visitPoint(double lon, double lat, double alt) {
sb.append(lon).append(SPACE).append(lat);
if (Double.isNaN(alt) == false) {
if (!Double.isNaN(alt)) {
sb.append(SPACE).append(alt);
}
}
@ -138,13 +138,11 @@ public class WellKnownText {
*/
private Geometry parseGeometry(StreamTokenizer stream) throws IOException, ParseException {
final String type = nextWord(stream).toLowerCase(Locale.ROOT);
switch (type) {
case "point":
return parsePoint(stream);
case "bbox":
return parseBBox(stream);
}
throw new IllegalArgumentException("Unknown geometry type: " + type);
return switch (type) {
case "point" -> parsePoint(stream);
case "bbox" -> parseBBox(stream);
default -> throw new IllegalArgumentException("Unknown geometry type: " + type);
};
}
private Point parsePoint(StreamTokenizer stream) throws IOException, ParseException {
@ -178,7 +176,7 @@ public class WellKnownText {
if (isNumberNext(stream)) {
alts.add(nextNumber(stream));
}
if (alts.isEmpty() == false && alts.size() != lons.size()) {
if (!alts.isEmpty() && alts.size() != lons.size()) {
throw new ParseException("coordinate dimensions do not match: " + tokenString(stream), stream.lineno());
}
}
@ -187,7 +185,6 @@ public class WellKnownText {
if (nextEmptyOrOpen(stream).equals(EMPTY)) {
return Rectangle.EMPTY;
}
// TODO: Add 3D support
double minLon = nextNumber(stream);
nextComma(stream);
double maxLon = nextNumber(stream);
@ -203,18 +200,16 @@ public class WellKnownText {
* next word in the stream
*/
private String nextWord(StreamTokenizer stream) throws ParseException, IOException {
switch (stream.nextToken()) {
case StreamTokenizer.TT_WORD:
return switch (stream.nextToken()) {
case StreamTokenizer.TT_WORD -> {
final String word = stream.sval;
return word.equalsIgnoreCase(EMPTY) ? EMPTY : word;
case '(':
return LPAREN;
case ')':
return RPAREN;
case ',':
return COMMA;
}
throw new ParseException("expected word but found: " + tokenString(stream), stream.lineno());
yield word.equalsIgnoreCase(EMPTY) ? EMPTY : word;
}
case '(' -> LPAREN;
case ')' -> RPAREN;
case ',' -> COMMA;
default -> throw new ParseException("expected word but found: " + tokenString(stream), stream.lineno());
};
}
private double nextNumber(StreamTokenizer stream) throws IOException, ParseException {
@ -238,7 +233,7 @@ public class WellKnownText {
case StreamTokenizer.TT_EOF -> EOF;
case StreamTokenizer.TT_EOL -> EOL;
case StreamTokenizer.TT_NUMBER -> NUMBER;
default -> "'" + (char) stream.ttype + "'";
default -> "'" + (char) stream.ttype + '\'';
};
}

View File

@ -29,19 +29,19 @@ import org.springframework.test.context.ContextConfiguration
@ContextConfiguration(classes = [CoroutineRepositoryELCIntegrationTests.Config::class])
class CoroutineRepositoryELCIntegrationTests : CoroutineRepositoryIntegrationTests() {
@Configuration
@Import(ReactiveElasticsearchTemplateConfiguration::class)
@EnableReactiveElasticsearchRepositories(
considerNestedRepositories = true,
includeFilters = [ComponentScan.Filter(
type = FilterType.ASSIGNABLE_TYPE,
classes = [CoroutineElasticsearchRepository::class]
)]
)
open class Config {
@Bean
open fun indexNameProvider(): IndexNameProvider {
return IndexNameProvider("coroutine-repository")
}
}
@Configuration
@Import(ReactiveElasticsearchTemplateConfiguration::class)
@EnableReactiveElasticsearchRepositories(
considerNestedRepositories = true,
includeFilters = [ComponentScan.Filter(
type = FilterType.ASSIGNABLE_TYPE,
classes = [CoroutineElasticsearchRepository::class]
)]
)
open class Config {
@Bean
open fun indexNameProvider(): IndexNameProvider {
return IndexNameProvider("coroutine-repository")
}
}
}

View File

@ -21,13 +21,16 @@ import kotlinx.coroutines.flow.toList
import kotlinx.coroutines.test.runTest
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Order
import org.junit.jupiter.api.Test
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.data.annotation.Id
import org.springframework.data.elasticsearch.annotations.Document
import org.springframework.data.elasticsearch.annotations.Field
import org.springframework.data.elasticsearch.annotations.FieldType
import org.springframework.data.elasticsearch.core.ReactiveElasticsearchOperations
import org.springframework.data.elasticsearch.core.SearchHit
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates
import org.springframework.data.elasticsearch.junit.jupiter.SpringIntegrationTest
import org.springframework.data.elasticsearch.repository.CoroutineElasticsearchRepository
import org.springframework.data.elasticsearch.utils.IndexNameProvider
@ -37,65 +40,73 @@ import org.springframework.data.elasticsearch.utils.IndexNameProvider
* @author Peter-Josef Meisch
* @since 5.2
*/
@Suppress("SpringJavaInjectionPointsAutowiringInspection")
@SpringIntegrationTest
abstract class CoroutineRepositoryIntegrationTests {
@Autowired
lateinit var indexNameProvider: IndexNameProvider
@Autowired
lateinit var indexNameProvider: IndexNameProvider
@Autowired
lateinit var repository: CoroutineEntityRepository
@Autowired
lateinit var operations: ReactiveElasticsearchOperations
val entities = listOf(
Entity("1", "test"),
Entity("2", "test"),
)
@Autowired
lateinit var repository: CoroutineEntityRepository
@BeforeEach
fun setUp() = runTest {
repository.saveAll(entities).last()
}
val entities = listOf(
Entity("1", "test"),
Entity("2", "test"),
)
@Test
fun `should instantiate repository`() = runTest {
assertThat(repository).isNotNull()
}
@BeforeEach
fun setUp() = runTest {
repository.saveAll(entities).last()
}
@Test
fun `should run with method returning a list of entities`() = runTest {
@Test
@Order(Int.MAX_VALUE)
fun cleanup() {
operations.indexOps(IndexCoordinates.of(indexNameProvider.prefix + "*")).delete().block()
}
val result = repository.searchByText("test")
@Test
fun `should instantiate repository`() = runTest {
assertThat(repository).isNotNull()
}
assertThat(result).containsExactlyInAnyOrderElementsOf(entities)
}
@Test
fun `should run with method returning a list of entities`() = runTest {
@Test
fun `should run with method returning a flow of entities`() = runTest {
val result = repository.searchByText("test")
val result = repository.findByText("test").toList(mutableListOf())
assertThat(result).containsExactlyInAnyOrderElementsOf(entities)
}
assertThat(result).containsExactlyInAnyOrderElementsOf(entities)
}
@Test
fun `should run with method returning a flow of entities`() = runTest {
@Test
fun `should run with method returning a flow of SearchHit`() = runTest {
val result = repository.findByText("test").toList(mutableListOf())
val result = repository.queryByText("test").toList(mutableListOf())
assertThat(result).containsExactlyInAnyOrderElementsOf(entities)
}
assertThat(result.map { it.content }).containsExactlyInAnyOrderElementsOf(entities)
}
@Test
fun `should run with method returning a flow of SearchHit`() = runTest {
@Document(indexName = "#{@indexNameProvider.indexName()}")
data class Entity(
@Id val id: String?,
@Field(type = FieldType.Text) val text: String?,
)
val result = repository.queryByText("test").toList(mutableListOf())
interface CoroutineEntityRepository : CoroutineElasticsearchRepository<Entity, String> {
assertThat(result.map { it.content }).containsExactlyInAnyOrderElementsOf(entities)
}
suspend fun searchByText(text: String): List<Entity>
suspend fun findByText(text: String): Flow<Entity>
suspend fun queryByText(text: String): Flow<SearchHit<Entity>>
}
@Document(indexName = "#{@indexNameProvider.indexName()}")
data class Entity(
@Id val id: String?,
@Field(type = FieldType.Text) val text: String?,
)
interface CoroutineEntityRepository : CoroutineElasticsearchRepository<Entity, String> {
suspend fun searchByText(text: String): List<Entity>
suspend fun findByText(text: String): Flow<Entity>
suspend fun queryByText(text: String): Flow<SearchHit<Entity>>
}
}

View File

@ -17,46 +17,61 @@ import kotlin.coroutines.Continuation
*/
class ReactiveElasticsearchQueryMethodCoroutineUnitTests {
val projectionFactory = SpelAwareProxyProjectionFactory()
private val projectionFactory = SpelAwareProxyProjectionFactory()
interface PersonRepository : CoroutineElasticsearchRepository<Person, String> {
interface PersonRepository : CoroutineElasticsearchRepository<Person, String> {
suspend fun findSuspendAllByName(): Flow<Person>
suspend fun findSuspendAllByName(): Flow<Person>
fun findAllByName(): Flow<Person>
fun findAllByName(): Flow<Person>
suspend fun findSuspendByName(): List<Person>
}
suspend fun findSuspendByName(): List<Person>
}
@Test // #2545
internal fun `should consider methods returning Flow as collection queries`() {
@Test // #2545
internal fun `should consider methods returning Flow as collection queries`() {
val method = PersonRepository::class.java.getMethod("findAllByName")
val queryMethod = ReactiveElasticsearchQueryMethod(method, DefaultRepositoryMetadata(PersonRepository::class.java), projectionFactory, SimpleElasticsearchMappingContext())
val method = PersonRepository::class.java.getMethod("findAllByName")
val queryMethod = ReactiveElasticsearchQueryMethod(
method,
DefaultRepositoryMetadata(PersonRepository::class.java),
projectionFactory,
SimpleElasticsearchMappingContext()
)
assertThat(queryMethod.isCollectionQuery).isTrue()
}
assertThat(queryMethod.isCollectionQuery).isTrue()
}
@Test // #2545
internal fun `should consider suspended methods returning Flow as collection queries`() {
@Test // #2545
internal fun `should consider suspended methods returning Flow as collection queries`() {
val method = PersonRepository::class.java.getMethod("findSuspendAllByName", Continuation::class.java)
val queryMethod = ReactiveElasticsearchQueryMethod(method, DefaultRepositoryMetadata(PersonRepository::class.java), projectionFactory, SimpleElasticsearchMappingContext())
val method = PersonRepository::class.java.getMethod("findSuspendAllByName", Continuation::class.java)
val queryMethod = ReactiveElasticsearchQueryMethod(
method,
DefaultRepositoryMetadata(PersonRepository::class.java),
projectionFactory,
SimpleElasticsearchMappingContext()
)
assertThat(queryMethod.isCollectionQuery).isTrue()
}
assertThat(queryMethod.isCollectionQuery).isTrue()
}
@Test // #2545
internal fun `should consider suspended methods returning List as collection queries`() {
@Test // #2545
internal fun `should consider suspended methods returning List as collection queries`() {
val method = PersonRepository::class.java.getMethod("findSuspendByName", Continuation::class.java)
val queryMethod = ReactiveElasticsearchQueryMethod(method, DefaultRepositoryMetadata(PersonRepository::class.java), projectionFactory, SimpleElasticsearchMappingContext())
val method = PersonRepository::class.java.getMethod("findSuspendByName", Continuation::class.java)
val queryMethod = ReactiveElasticsearchQueryMethod(
method,
DefaultRepositoryMetadata(PersonRepository::class.java),
projectionFactory,
SimpleElasticsearchMappingContext()
)
assertThat(queryMethod.isCollectionQuery).isTrue()
}
assertThat(queryMethod.isCollectionQuery).isTrue()
}
data class Person(
@Id val id: String?,
@Field val name: String?
)
data class Person(
@Id val id: String?,
@Field val name: String?
)
}