DATAES-285 - upgrade to elasticsearch 5.x

Artur Konczak 2017-05-02 18:21:31 +01:00 committed by Mohsin Husen
parent 17603ca8a9
commit 089d7746be
114 changed files with 1021 additions and 1375 deletions

.gitignore

@ -6,9 +6,6 @@ atlassian-ide-plugin.xml
## ignore any target dir
target
##ignore only top level data dir - local node data files for unit tests
/data
## Ignore project files created by Eclipse
.settings
.project

pom.xml

@ -19,7 +19,7 @@
<properties>
<commonscollections>3.2.1</commonscollections>
<commonslang>2.6</commonslang>
<elasticsearch>2.4.0</elasticsearch>
<elasticsearch>5.2.1</elasticsearch>
<springdata.commons>2.0.0.BUILD-SNAPSHOT</springdata.commons>
</properties>
@ -70,6 +70,30 @@
<version>${elasticsearch}</version>
</dependency>
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>transport</artifactId>
<version>${elasticsearch}</version>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>log4j-over-slf4j</artifactId>
<version>1.7.22</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.6.2</version>
</dependency>
<!-- Jackson JSON Mapper -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
@ -96,6 +120,12 @@
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.openwebbeans.test</groupId>
@ -120,6 +150,13 @@
<scope>test</scope>
</dependency>
<dependency><!-- required by elasticsearch -->
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>transport-netty4-client</artifactId>
<version>${elasticsearch}</version>
<!--<scope>test</scope>-->
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
@ -127,6 +164,8 @@
<scope>test</scope>
</dependency>
</dependencies>
<build>

CompletionField.java

@ -32,8 +32,6 @@ public @interface CompletionField {
String analyzer() default "simple";
boolean payloads() default false;
boolean preserveSeparators() default true;
boolean preservePositionIncrements() default true;

Field.java

@ -38,7 +38,7 @@ public @interface Field {
FieldType type() default FieldType.Auto;
FieldIndex index() default FieldIndex.analyzed;
boolean index() default true;
DateFormat format() default DateFormat.none;
@ -46,6 +46,8 @@ public @interface Field {
boolean store() default false;
boolean fielddata() default false;
String searchAnalyzer() default "";
String analyzer() default "";

FieldType.java

@ -21,5 +21,5 @@ package org.springframework.data.elasticsearch.annotations;
* @author Artur Konczak
*/
public enum FieldType {
String, Integer, Long, Date, Float, Double, Boolean, Object, Auto, Nested, Ip, Attachment
text, Integer, Long, Date, Float, Double, Boolean, Object, Auto, Nested, Ip, Attachment, keyword
}
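
For illustration, a minimal sketch of an entity using the reworked annotations: FieldType.String is replaced by text and keyword, the FieldIndex enum gives way to a boolean index flag, and fielddata must now be requested explicitly. The Article class, its fields and the index name are hypothetical.

import java.util.Date;

import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;

// Hypothetical document using the 5.x-style annotations introduced in this commit.
@Document(indexName = "articles", type = "article")
public class Article {

    @Id
    private String id;

    // Analyzed full-text field; fielddata is opt-in when sorting or aggregating on it.
    @Field(type = FieldType.text, analyzer = "standard", fielddata = true)
    private String title;

    // Exact-value field; replaces the old String type with index = not_analyzed.
    @Field(type = FieldType.keyword)
    private String category;

    // index = false replaces FieldIndex.no.
    @Field(type = FieldType.text, index = false)
    private String rawSource;

    @Field(type = FieldType.Date)
    private Date publishedAt;
}
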

GeoPointField.java

@ -25,8 +25,4 @@ import java.lang.annotation.*;
@Documented
public @interface GeoPointField {
boolean geoHashPrefix() default false;
String geoHashPrecision() default "0";
}

InnerField.java

@ -31,10 +31,12 @@ public @interface InnerField {
FieldType type();
FieldIndex index() default FieldIndex.analyzed;
boolean index() default true;
boolean store() default false;
boolean fielddata() default false;
String searchAnalyzer() default "";
String indexAnalyzer() default "";

NodeClientFactoryBean.java

@ -1,127 +1,144 @@
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.elasticsearch.client;
import static org.elasticsearch.node.NodeBuilder.*;
import java.io.InputStream;
import org.apache.commons.lang.StringUtils;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.settings.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
/**
* NodeClientFactoryBean
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
public class NodeClientFactoryBean implements FactoryBean<NodeClient>, InitializingBean, DisposableBean {
private static final Logger logger = LoggerFactory.getLogger(NodeClientFactoryBean.class);
private boolean local;
private boolean enableHttp;
private String clusterName;
private NodeClient nodeClient;
private String pathData;
private String pathHome;
private String pathConfiguration;
NodeClientFactoryBean() {
}
public NodeClientFactoryBean(boolean local) {
this.local = local;
}
@Override
public NodeClient getObject() throws Exception {
return nodeClient;
}
@Override
public Class<? extends Client> getObjectType() {
return NodeClient.class;
}
@Override
public boolean isSingleton() {
return true;
}
@Override
public void afterPropertiesSet() throws Exception {
nodeClient = (NodeClient) nodeBuilder().settings(Settings.builder().put(loadConfig())
.put("http.enabled", String.valueOf(this.enableHttp))
.put("path.home", this.pathHome)
.put("path.data", this.pathData))
.clusterName(this.clusterName).local(this.local).node()
.client();
}
private Settings loadConfig() {
if (StringUtils.isNotBlank(pathConfiguration)) {
InputStream stream = getClass().getClassLoader().getResourceAsStream(pathConfiguration);
if (stream != null) {
return Settings.builder().loadFromStream(pathConfiguration, getClass().getClassLoader().getResourceAsStream(pathConfiguration)).build();
}
logger.error(String.format("Unable to read node configuration from file [%s]", pathConfiguration));
}
return Settings.builder().build();
}
public void setLocal(boolean local) {
this.local = local;
}
public void setEnableHttp(boolean enableHttp) {
this.enableHttp = enableHttp;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public void setPathData(String pathData) {
this.pathData = pathData;
}
public void setPathHome(String pathHome) {
this.pathHome = pathHome;
}
public void setPathConfiguration(String configuration) {
this.pathConfiguration = configuration;
}
@Override
public void destroy() throws Exception {
try {
logger.info("Closing elasticSearch client");
if (nodeClient != null) {
nodeClient.close();
}
} catch (final Exception e) {
logger.error("Error closing ElasticSearch client: ", e);
}
}
}
/*
* Copyright 2015-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.elasticsearch.client;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;
import org.apache.commons.lang.StringUtils;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.transport.Netty4Plugin;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
import static java.util.Arrays.*;
/**
* NodeClientFactoryBean
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
public class NodeClientFactoryBean implements FactoryBean<Client>, InitializingBean, DisposableBean {
private static final Logger logger = LoggerFactory.getLogger(NodeClientFactoryBean.class);
private boolean local;
private boolean enableHttp;
private String clusterName;
private NodeClient nodeClient;
private String pathData;
private String pathHome;
private String pathConfiguration;
public static class TestNode extends Node {
public TestNode(Settings preparedSettings, Collection<Class<? extends Plugin>> classpathPlugins) {
super(InternalSettingsPreparer.prepareEnvironment(preparedSettings, null), classpathPlugins);
}
}
NodeClientFactoryBean() {
}
public NodeClientFactoryBean(boolean local) {
this.local = local;
}
@Override
public NodeClient getObject() throws Exception {
return nodeClient;
}
@Override
public Class<? extends Client> getObjectType() {
return NodeClient.class;
}
@Override
public boolean isSingleton() {
return true;
}
@Override
public void afterPropertiesSet() throws Exception {
nodeClient = (NodeClient) new TestNode(
Settings.builder().put(loadConfig())
.put("transport.type", "netty4")
.put("transport.type", "local")
.put("http.type", "netty4")
.put("path.home", this.pathHome)
.put("path.data", this.pathData)
.put("cluster.name", this.clusterName)
.put("node.max_local_storage_nodes", 100)
.put("script.inline", "true")
.build(), asList(Netty4Plugin.class)).start().client();
}
private Settings loadConfig() throws IOException {
if (StringUtils.isNotBlank(pathConfiguration)) {
InputStream stream = getClass().getClassLoader().getResourceAsStream(pathConfiguration);
if (stream != null) {
return Settings.builder().loadFromStream(pathConfiguration, getClass().getClassLoader().getResourceAsStream(pathConfiguration)).build();
}
logger.error(String.format("Unable to read node configuration from file [%s]", pathConfiguration));
}
return Settings.builder().build();
}
public void setLocal(boolean local) {
this.local = local;
}
public void setEnableHttp(boolean enableHttp) {
this.enableHttp = enableHttp;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public void setPathData(String pathData) {
this.pathData = pathData;
}
public void setPathHome(String pathHome) {
this.pathHome = pathHome;
}
public void setPathConfiguration(String configuration) {
this.pathConfiguration = configuration;
}
@Override
public void destroy() throws Exception {
try {
logger.info("Closing elasticSearch client");
if (nodeClient != null) {
nodeClient.close();
}
} catch (final Exception e) {
logger.error("Error closing ElasticSearch client: ", e);
}
}
}
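
What the rewritten factory does can be sketched in isolation: NodeBuilder no longer exists in 5.x, so an embedded node is started by subclassing Node (the TestNode helper above) and registering the Netty4 transport plugin from the new transport-netty4-client dependency. The cluster name and paths below are placeholders.

import static java.util.Arrays.asList;

import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;
import org.elasticsearch.transport.Netty4Plugin;
import org.springframework.data.elasticsearch.client.NodeClientFactoryBean;

public class EmbeddedNodeExample {

    public static Client startLocalClient() throws Exception {
        Settings settings = Settings.builder()
                .put("cluster.name", "test-cluster")      // placeholder cluster name
                .put("path.home", "target/es")            // placeholder paths
                .put("path.data", "target/es/data")
                .put("transport.type", "local")           // keep the transport in-process
                .put("http.type", "netty4")               // HTTP is served by the Netty4 plugin
                .put("node.max_local_storage_nodes", 100)
                .build();

        // TestNode exposes the protected Node constructor that accepts classpath plugins.
        Node node = new NodeClientFactoryBean.TestNode(settings, asList(Netty4Plugin.class));
        node.start();
        return node.client();
    }
}
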

TransportClientFactoryBean.java

@ -23,6 +23,7 @@ import java.util.Properties;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
@ -86,7 +87,8 @@ public class TransportClientFactoryBean implements FactoryBean<TransportClient>,
}
protected void buildClient() throws Exception {
client = TransportClient.builder().settings(settings()).build();
client = new PreBuiltTransportClient(settings());
Assert.hasText(clusterNodes, "[Assertion failed] clusterNodes settings missing.");
for (String clusterNode : split(clusterNodes, COMMA)) {
String hostName = substringBeforeLast(clusterNode, COLON);
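
For reference, a minimal sketch of the client construction that buildClient() now performs: TransportClient.builder() is gone and PreBuiltTransportClient, from the org.elasticsearch.client:transport artifact added to the pom above, takes its place. The cluster name, host and port are placeholders.

import java.net.InetAddress;

import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.transport.client.PreBuiltTransportClient;

public class TransportClientExample {

    public static TransportClient connect() throws Exception {
        Settings settings = Settings.builder()
                .put("cluster.name", "elasticsearch")   // placeholder cluster name
                .build();

        // PreBuiltTransportClient registers the Netty4 transport internally.
        TransportClient client = new PreBuiltTransportClient(settings);
        client.addTransportAddress(
                new InetSocketTransportAddress(InetAddress.getByName("localhost"), 9300));
        return client;
    }
}
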

NodeClientBeanDefinitionParser.java

@ -1,57 +1,57 @@
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.elasticsearch.config;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.data.elasticsearch.client.NodeClientFactoryBean;
import org.w3c.dom.Element;
/**
* NodeClientBeanDefinitionParser
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
public class NodeClientBeanDefinitionParser extends AbstractBeanDefinitionParser {
@Override
protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(NodeClientFactoryBean.class);
setLocalSettings(element, builder);
return getSourcedBeanDefinition(builder, element, parserContext);
}
private void setLocalSettings(Element element, BeanDefinitionBuilder builder) {
builder.addPropertyValue("local", Boolean.valueOf(element.getAttribute("local")));
builder.addPropertyValue("clusterName", element.getAttribute("cluster-name"));
builder.addPropertyValue("enableHttp", Boolean.valueOf(element.getAttribute("http-enabled")));
builder.addPropertyValue("pathData", element.getAttribute("path-data"));
builder.addPropertyValue("pathHome", element.getAttribute("path-home"));
builder.addPropertyValue("pathConfiguration", element.getAttribute("path-configuration"));
}
private AbstractBeanDefinition getSourcedBeanDefinition(BeanDefinitionBuilder builder, Element source,
ParserContext context) {
AbstractBeanDefinition definition = builder.getBeanDefinition();
definition.setSource(context.extractSource(source));
return definition;
}
}
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.elasticsearch.config;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.data.elasticsearch.client.NodeClientFactoryBean;
import org.w3c.dom.Element;
/**
* NodeClientBeanDefinitionParser
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
public class NodeClientBeanDefinitionParser extends AbstractBeanDefinitionParser {
@Override
protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(NodeClientFactoryBean.class);
setLocalSettings(element, builder);
return getSourcedBeanDefinition(builder, element, parserContext);
}
private void setLocalSettings(Element element, BeanDefinitionBuilder builder) {
builder.addPropertyValue("local", Boolean.valueOf(element.getAttribute("local")));
builder.addPropertyValue("clusterName", element.getAttribute("cluster-name"));
builder.addPropertyValue("enableHttp", Boolean.valueOf(element.getAttribute("http-enabled")));
builder.addPropertyValue("pathData", element.getAttribute("path-data"));
builder.addPropertyValue("pathHome", element.getAttribute("path-home"));
builder.addPropertyValue("pathConfiguration", element.getAttribute("path-configuration"));
}
private AbstractBeanDefinition getSourcedBeanDefinition(BeanDefinitionBuilder builder, Element source,
ParserContext context) {
AbstractBeanDefinition definition = builder.getBeanDefinition();
definition.setSource(context.extractSource(source));
return definition;
}
}

CriteriaFilterProcessor.java

@ -116,8 +116,8 @@ class CriteriaFilterProcessor {
Object[] valArray = (Object[]) value;
Assert.noNullElements(valArray, "Geo distance filter takes 2 not null elements array as parameter.");
Assert.isTrue(valArray.length == 2, "Geo distance filter takes a 2-elements array as parameter.");
Assert.isTrue(valArray[0] instanceof GeoPoint || valArray[0] instanceof String || valArray[0] instanceof Point, "First element of a geo distance filter must be a GeoPoint, a Point or a String");
Assert.isTrue(valArray[1] instanceof String || valArray[1] instanceof Distance, "Second element of a geo distance filter must be a String or a Distance");
Assert.isTrue(valArray[0] instanceof GeoPoint || valArray[0] instanceof String || valArray[0] instanceof Point, "First element of a geo distance filter must be a GeoPoint, a Point or a text");
Assert.isTrue(valArray[1] instanceof String || valArray[1] instanceof Distance, "Second element of a geo distance filter must be a text or a Distance");
StringBuilder dist = new StringBuilder();
@ -129,15 +129,15 @@ class CriteriaFilterProcessor {
if (valArray[0] instanceof GeoPoint) {
GeoPoint loc = (GeoPoint) valArray[0];
geoDistanceQueryBuilder.lat(loc.getLat()).lon(loc.getLon()).distance(dist.toString()).geoDistance(GeoDistance.PLANE);
geoDistanceQueryBuilder.point(loc.getLat(),loc.getLon()).distance(dist.toString()).geoDistance(GeoDistance.PLANE);
} else if (valArray[0] instanceof Point) {
GeoPoint loc = GeoPoint.fromPoint((Point) valArray[0]);
geoDistanceQueryBuilder.lat(loc.getLat()).lon(loc.getLon()).distance(dist.toString()).geoDistance(GeoDistance.PLANE);
geoDistanceQueryBuilder.point(loc.getLat(), loc.getLon()).distance(dist.toString()).geoDistance(GeoDistance.PLANE);
} else {
String loc = (String) valArray[0];
if (loc.contains(",")) {
String c[] = loc.split(",");
geoDistanceQueryBuilder.lat(Double.parseDouble(c[0])).lon(Double.parseDouble(c[1])).distance(dist.toString()).geoDistance(GeoDistance.PLANE);
geoDistanceQueryBuilder.point(Double.parseDouble(c[0]), Double.parseDouble(c[1])).distance(dist.toString()).geoDistance(GeoDistance.PLANE);
} else {
geoDistanceQueryBuilder.geohash(loc).distance(dist.toString()).geoDistance(GeoDistance.PLANE);
}
@ -159,7 +159,7 @@ class CriteriaFilterProcessor {
oneParameterBBox((GeoBoundingBoxQueryBuilder) filter, valArray[0]);
} else if (valArray.length == 2) {
//2x GeoPoint
//2x String
//2x text
twoParameterBBox((GeoBoundingBoxQueryBuilder) filter, valArray);
} else {
//error
@ -206,8 +206,7 @@ class CriteriaFilterProcessor {
geoBBox = (GeoBox) value;
}
filter.topLeft(geoBBox.getTopLeft().getLat(), geoBBox.getTopLeft().getLon());
filter.bottomRight(geoBBox.getBottomRight().getLat(), geoBBox.getBottomRight().getLon());
filter.setCorners(geoBBox.getTopLeft().getLat(), geoBBox.getTopLeft().getLon(), geoBBox.getBottomRight().getLat(), geoBBox.getBottomRight().getLon());
}
private static boolean isType(Object[] array, Class clazz) {
@ -220,17 +219,15 @@ class CriteriaFilterProcessor {
}
private void twoParameterBBox(GeoBoundingBoxQueryBuilder filter, Object[] values) {
Assert.isTrue(isType(values, GeoPoint.class) || isType(values, String.class), " both elements of boundedBy filter must be type of GeoPoint or String(format lat,lon or geohash)");
Assert.isTrue(isType(values, GeoPoint.class) || isType(values, String.class), " both elements of boundedBy filter must be type of GeoPoint or text(format lat,lon or geohash)");
if (values[0] instanceof GeoPoint) {
GeoPoint topLeft = (GeoPoint) values[0];
GeoPoint bottomRight = (GeoPoint) values[1];
filter.topLeft(topLeft.getLat(), topLeft.getLon());
filter.bottomRight(bottomRight.getLat(), bottomRight.getLon());
filter.setCorners(topLeft.getLat(), topLeft.getLon(), bottomRight.getLat(), bottomRight.getLon());
} else {
String topLeft = (String) values[0];
String bottomRight = (String) values[1];
filter.topLeft(topLeft);
filter.bottomRight(bottomRight);
filter.setCorners(topLeft, bottomRight);
}
}
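
A short sketch of the rewritten geo builders in isolation, since lat()/lon() and topLeft()/bottomRight() no longer exist in 5.x; the field name and coordinates are made up.

import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.index.query.GeoBoundingBoxQueryBuilder;
import org.elasticsearch.index.query.GeoDistanceQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

public class GeoQueryExamples {

    public static GeoDistanceQueryBuilder withinDistance() {
        // point(lat, lon) replaces the removed lat()/lon() pair.
        return QueryBuilders.geoDistanceQuery("location")
                .point(52.52, 13.40)
                .distance("10km")
                .geoDistance(GeoDistance.PLANE);
    }

    public static GeoBoundingBoxQueryBuilder boundedBy() {
        // setCorners(topLat, leftLon, bottomLat, rightLon) replaces topLeft()/bottomRight().
        GeoBoundingBoxQueryBuilder box = QueryBuilders.geoBoundingBoxQuery("location");
        box.setCorners(53.0, 12.0, 52.0, 14.0);
        return box;
    }
}
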

CriteriaQueryProcessor.java

@ -15,6 +15,7 @@
*/
package org.springframework.data.elasticsearch.core;
import static org.elasticsearch.index.query.Operator.AND;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.springframework.data.elasticsearch.core.query.Criteria.*;
@ -24,10 +25,7 @@ import java.util.List;
import java.util.ListIterator;
import org.apache.lucene.queryparser.flexible.core.util.StringUtils;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.BoostableQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryStringQueryBuilder;
import org.elasticsearch.index.query.*;
import org.springframework.data.elasticsearch.core.query.Criteria;
import org.springframework.util.Assert;
@ -148,7 +146,7 @@ class CriteriaQueryProcessor {
switch (key) {
case EQUALS:
query = queryStringQuery(searchText).field(fieldName).defaultOperator(QueryStringQueryBuilder.Operator.AND);
query = queryStringQuery(searchText).field(fieldName).defaultOperator(AND);
break;
case CONTAINS:
query = queryStringQuery("*" + searchText + "*").field(fieldName).analyzeWildcard(true);
@ -203,8 +201,6 @@ class CriteriaQueryProcessor {
if (Float.isNaN(boost)) {
return;
}
if (query instanceof BoostableQueryBuilder) {
((BoostableQueryBuilder) query).boost(boost);
}
query.boost(boost);
}
}
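
In isolation, the query the EQUALS branch now builds looks like this: the Operator enum moved to org.elasticsearch.index.query, and boost() is declared on QueryBuilder itself, which is why the BoostableQueryBuilder check disappears. Field name, text and boost value are placeholders.

import org.elasticsearch.index.query.Operator;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

public class EqualsCriteriaExample {

    public static QueryBuilder equalsQuery() {
        QueryBuilder query = QueryBuilders.queryStringQuery("spring data")
                .field("title")
                .defaultOperator(Operator.AND);
        // No instanceof check needed any more: every QueryBuilder can be boosted.
        query.boost(2.0f);
        return query;
    }
}
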

DefaultResultMapper.java

@ -94,7 +94,7 @@ public class DefaultResultMapper extends AbstractResultMapper {
}
}
return new AggregatedPageImpl<>(results, pageable, totalHits, response.getAggregations());
return new AggregatedPageImpl<T>(results, pageable, totalHits, response.getAggregations(), response.getScrollId());
}
private <T> void populateScriptFields(T result, SearchHit hit) {

ElasticsearchOperations.java

@ -18,6 +18,7 @@ package org.springframework.data.elasticsearch.core;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.common.Nullable;
import org.springframework.data.domain.Page;
import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter;
import org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersistentEntity;
@ -480,79 +481,52 @@ public interface ElasticsearchOperations {
<T> void refresh(Class<T> clazz);
/**
* Returns scroll id for criteria query
*
* @param query The criteria query.
* @param scrollTimeInMillis The time in millisecond for scroll feature
* {@link org.elasticsearch.action.search.SearchRequestBuilder#setScroll(org.elasticsearch.common.unit.TimeValue)}.
* @param noFields The no fields support
* {@link org.elasticsearch.action.search.SearchRequestBuilder#setNoFields()}.
* @return The scan id for input query.
*/
String scan(CriteriaQuery query, long scrollTimeInMillis, boolean noFields);
/**
* Returns scroll id for criteria query
*
* @param query The criteria query.
* @param scrollTimeInMillis The time in millisecond for scroll feature
* {@link org.elasticsearch.action.search.SearchRequestBuilder#setScroll(org.elasticsearch.common.unit.TimeValue)}.
* @param noFields The no fields support
* {@link org.elasticsearch.action.search.SearchRequestBuilder#setNoFields()}.
* @param clazz The class of entity to retrieve.
* @param <T> The type of entity to retrieve.
* @return The scan id for input query.
*/
<T> String scan(CriteriaQuery query, long scrollTimeInMillis, boolean noFields, Class<T> clazz);
/**
* Returns scroll id for scan query
* Returns scrolled page for given query
*
* @param query The search query.
* @param scrollTimeInMillis The time in millisecond for scroll feature
* {@link org.elasticsearch.action.search.SearchRequestBuilder#setScroll(org.elasticsearch.common.unit.TimeValue)}.
* @param noFields The no fields support
* {@link org.elasticsearch.action.search.SearchRequestBuilder#setNoFields()}.
* @param clazz The class of entity to retrieve.
* @return The scan id for input query.
*/
String scan(SearchQuery query, long scrollTimeInMillis, boolean noFields);
<T> Page<T> startScroll(long scrollTimeInMillis, SearchQuery query, Class<T> clazz);
/**
* Returns scroll id for scan query
* Returns scrolled page for given query
*
* @param query The search query.
* @param scrollTimeInMillis The time in millisecond for scroll feature
* {@link org.elasticsearch.action.search.SearchRequestBuilder#setScroll(org.elasticsearch.common.unit.TimeValue)}.
* @param noFields The no fields support
* {@link org.elasticsearch.action.search.SearchRequestBuilder#setNoFields()}.
* @param clazz The class of entity to retrieve.
* @param <T> The type of entity to retrieve.
* @param mapper Custom impl to map result to entities
* @return The scan id for input query.
*/
<T> String scan(SearchQuery query, long scrollTimeInMillis, boolean noFields, Class<T> clazz);
<T> Page<T> startScroll(long scrollTimeInMillis, SearchQuery query, Class<T> clazz, SearchResultMapper mapper);
/**
* Scrolls the results for give scroll id
* Returns scrolled page for given query
*
* @param scrollId
* @param scrollTimeInMillis
* @param clazz
* @param <T>
* @return
* @param criteriaQuery The search query.
* @param scrollTimeInMillis The time in millisecond for scroll feature
* {@link org.elasticsearch.action.search.SearchRequestBuilder#setScroll(org.elasticsearch.common.unit.TimeValue)}.
* @param clazz The class of entity to retrieve.
* @return The scan id for input query.
*/
<T> Page<T> scroll(String scrollId, long scrollTimeInMillis, Class<T> clazz);
<T> Page<T> startScroll(long scrollTimeInMillis, CriteriaQuery criteriaQuery, Class<T> clazz);
/**
* Scrolls the results for give scroll id using custom result mapper
* Returns scrolled page for given query
*
* @param scrollId
* @param scrollTimeInMillis
* @param mapper
* @param <T>
* @return
* @param criteriaQuery The search query.
* @param scrollTimeInMillis The time in millisecond for scroll feature
* {@link org.elasticsearch.action.search.SearchRequestBuilder#setScroll(org.elasticsearch.common.unit.TimeValue)}.
* @param mapper Custom impl to map result to entities
* @return The scan id for input query.
*/
<T> Page<T> scroll(String scrollId, long scrollTimeInMillis, SearchResultMapper mapper);
<T> Page<T> startScroll(long scrollTimeInMillis, CriteriaQuery criteriaQuery, Class<T> clazz, SearchResultMapper mapper);
<T> Page<T> continueScroll(@Nullable String scrollId, long scrollTimeInMillis, Class<T> clazz);
<T> Page<T> continueScroll(@Nullable String scrollId, long scrollTimeInMillis, Class<T> clazz, SearchResultMapper mapper);
/**
* Clears the search contexts associated with specified scroll ids.
*
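
As an illustration of the reshaped interface, a sketch of a complete scroll loop with the new startScroll/continueScroll pair replacing scan()/scroll(); it assumes the clearScroll method documented above and uses a placeholder index and type.

import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;

import org.springframework.data.domain.PageRequest;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.core.ScrolledPage;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.elasticsearch.core.query.SearchQuery;

public class ScrollExample {

    // Reads every document of the given type out of a placeholder "articles" index.
    public static <T> void scrollAll(ElasticsearchOperations operations, Class<T> clazz) {
        long scrollTimeInMillis = 60_000L;

        SearchQuery query = new NativeSearchQueryBuilder()
                .withQuery(matchAllQuery())
                .withIndices("articles").withTypes("article")   // placeholder index and type
                .withPageable(PageRequest.of(0, 100))           // batch size per scroll request
                .build();

        ScrolledPage<T> page = (ScrolledPage<T>) operations.startScroll(scrollTimeInMillis, query, clazz);
        while (page.hasContent()) {
            // consume page.getContent() here
            page = (ScrolledPage<T>) operations.continueScroll(page.getScrollId(), scrollTimeInMillis, clazz);
        }
        operations.clearScroll(page.getScrollId());
    }
}
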

ElasticsearchTemplate.java

@ -15,19 +15,11 @@
*/
package org.springframework.data.elasticsearch.core;
import static org.apache.commons.lang.StringUtils.*;
import static org.elasticsearch.action.search.SearchType.*;
import static org.elasticsearch.client.Requests.*;
import static org.elasticsearch.cluster.metadata.AliasAction.Type.*;
import static org.elasticsearch.index.VersionType.*;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.springframework.data.elasticsearch.core.MappingBuilder.*;
import static org.springframework.util.CollectionUtils.isEmpty;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
@ -35,8 +27,8 @@ import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Optional;
import org.elasticsearch.action.ListenableActionFuture;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
@ -46,7 +38,6 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.count.CountRequestBuilder;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.MultiGetRequestBuilder;
@ -54,15 +45,12 @@ import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.suggest.SuggestRequestBuilder;
import org.elasticsearch.action.suggest.SuggestResponse;
import org.elasticsearch.action.update.UpdateRequestBuilder;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.metadata.AliasAction;
import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -71,7 +59,7 @@ import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.search.suggest.SuggestBuilder;
@ -100,6 +88,12 @@ import org.springframework.data.elasticsearch.core.mapping.SimpleElasticsearchMa
import org.springframework.data.elasticsearch.core.query.*;
import org.springframework.data.util.CloseableIterator;
import org.springframework.util.Assert;
import static org.apache.commons.lang.StringUtils.*;
import static org.elasticsearch.client.Requests.*;
import static org.elasticsearch.index.VersionType.*;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.springframework.data.elasticsearch.core.MappingBuilder.*;
import static org.springframework.util.CollectionUtils.isEmpty;
/**
* ElasticsearchTemplate
@ -315,7 +309,7 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
@Override
public <T> List<String> queryForIds(SearchQuery query) {
SearchRequestBuilder request = prepareSearch(query).setQuery(query.getQuery()).setNoFields();
SearchRequestBuilder request = prepareSearch(query).setQuery(query.getQuery());
if (query.getFilter() != null) {
request.setPostFilter(query.getFilter());
}
@ -357,15 +351,14 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
@Override
public <T> Page<T> queryForPage(StringQuery query, Class<T> clazz, SearchResultMapper mapper) {
SearchResponse response = getSearchResponse(prepareSearch(query, clazz).setQuery(query.getSource()).execute());
SearchResponse response = getSearchResponse(prepareSearch(query, clazz).setQuery(wrapperQuery(query.getSource())).execute());
return mapper.mapResults(response, clazz, query.getPageable());
}
@Override
public <T> CloseableIterator<T> stream(CriteriaQuery query, Class<T> clazz) {
final long scrollTimeInMillis = TimeValue.timeValueMinutes(1).millis();
final String initScrollId = scan(query, scrollTimeInMillis, false, clazz);
return doStream(initScrollId, scrollTimeInMillis, clazz, resultsMapper);
return doStream(scrollTimeInMillis, (ScrolledPage<T>) startScroll(scrollTimeInMillis, query, clazz), clazz, resultsMapper);
}
@Override
@ -376,29 +369,27 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
@Override
public <T> CloseableIterator<T> stream(SearchQuery query, final Class<T> clazz, final SearchResultMapper mapper) {
final long scrollTimeInMillis = TimeValue.timeValueMinutes(1).millis();
final String initScrollId = scan(query, scrollTimeInMillis, false, clazz);
return doStream(initScrollId, scrollTimeInMillis, clazz, mapper);
return doStream(scrollTimeInMillis, (ScrolledPage<T>) startScroll(scrollTimeInMillis, query, clazz, mapper), clazz, mapper);
}
private <T> CloseableIterator<T> doStream(final String initScrollId, final long scrollTimeInMillis,
final Class<T> clazz, final SearchResultMapper mapper) {
private <T> CloseableIterator<T> doStream(final long scrollTimeInMillis, final ScrolledPage<T> page, final Class<T> clazz, final SearchResultMapper mapper) {
return new CloseableIterator<T>() {
/** As we couldn't retrieve single result with scroll, store current hits. */
private volatile Iterator<T> currentHits;
private volatile Iterator<T> currentHits = page.iterator();
/** The scroll id. */
private volatile String scrollId = initScrollId;
private volatile String scrollId = page.getScrollId();
/** If stream is finished (ie: cluster returns no results. */
private volatile boolean finished;
private volatile boolean finished = !currentHits.hasNext();
@Override
public void close() {
try {
// Clear scroll on cluster only in case of error (cause elasticsearch auto clear scroll when it's done)
if (!finished && scrollId != null && currentHits != null && currentHits.hasNext()) {
client.prepareClearScroll().addScrollId(scrollId).execute().actionGet();
clearScroll(scrollId);
}
} finally {
currentHits = null;
@ -415,12 +406,11 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
// Test if it remains hits
if (currentHits == null || !currentHits.hasNext()) {
// Do a new request
SearchResponse response = getSearchResponse(
client.prepareSearchScroll(scrollId).setScroll(TimeValue.timeValueMillis(scrollTimeInMillis)).execute());
final ScrolledPage<T> scroll = (ScrolledPage<T>) continueScroll(scrollId, scrollTimeInMillis, clazz, mapper);
// Save hits and scroll id
currentHits = mapper.mapResults(response, clazz, Pageable.unpaged()).iterator();
currentHits = scroll.iterator();
finished = !currentHits.hasNext();
scrollId = response.getScrollId();
scrollId = scroll.getScrollId();
}
return currentHits.hasNext();
}
@ -477,11 +467,12 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
return count(query, null);
}
private long doCount(CountRequestBuilder countRequestBuilder, QueryBuilder elasticsearchQuery) {
private long doCount(SearchRequestBuilder countRequestBuilder, QueryBuilder elasticsearchQuery) {
if (elasticsearchQuery != null) {
countRequestBuilder.setQuery(elasticsearchQuery);
}
return countRequestBuilder.execute().actionGet().getCount();
return countRequestBuilder.execute().actionGet().getHits().getTotalHits();
}
private long doCount(SearchRequestBuilder searchRequestBuilder, QueryBuilder elasticsearchQuery,
@ -494,11 +485,10 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
if (elasticsearchFilter != null) {
searchRequestBuilder.setPostFilter(elasticsearchFilter);
}
searchRequestBuilder.setSearchType(SearchType.COUNT);
return searchRequestBuilder.execute().actionGet().getHits().getTotalHits();
}
private <T> CountRequestBuilder prepareCount(Query query, Class<T> clazz) {
private <T> SearchRequestBuilder prepareCount(Query query, Class<T> clazz) {
String indexName[] = !isEmpty(query.getIndices())
? query.getIndices().toArray(new String[query.getIndices().size()])
: retrieveIndexNameFromPersistentEntity(clazz);
@ -507,11 +497,12 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
Assert.notNull(indexName, "No index defined for Query");
CountRequestBuilder countRequestBuilder = client.prepareCount(indexName);
SearchRequestBuilder countRequestBuilder = client.prepareSearch(indexName);
if (types != null) {
countRequestBuilder.setTypes(types);
}
countRequestBuilder.setSize(0);
return countRequestBuilder;
}
@ -533,6 +524,10 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
MultiGetRequestBuilder builder = client.prepareMultiGet();
if (searchQuery.getFields() != null && !searchQuery.getFields().isEmpty()) {
searchQuery.addSourceFilter(new FetchSourceFilter(toArray(searchQuery.getFields()), null));
}
for (String id : searchQuery.getIds()) {
MultiGetRequest.Item item = new MultiGetRequest.Item(indexName, type, id);
@ -541,9 +536,6 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
item = item.routing(searchQuery.getRoute());
}
if (searchQuery.getFields() != null && !searchQuery.getFields().isEmpty()) {
item = item.fields(toArray(searchQuery.getFields()));
}
builder.add(item);
}
return builder.execute().actionGet();
@ -602,18 +594,7 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
for (IndexQuery query : queries) {
bulkRequest.add(prepareIndex(query));
}
BulkResponse bulkResponse = bulkRequest.execute().actionGet();
if (bulkResponse.hasFailures()) {
Map<String, String> failedDocuments = new HashMap<>();
for (BulkItemResponse item : bulkResponse.getItems()) {
if (item.isFailed())
failedDocuments.put(item.getId(), item.getFailureMessage());
}
throw new ElasticsearchException(
"Bulk indexing has failures. Use ElasticsearchException.getFailedDocuments() for detailed messages ["
+ failedDocuments + "]",
failedDocuments);
}
checkForBulkUpdateFailure(bulkRequest.execute().actionGet());
}
@Override
@ -622,7 +603,10 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
for (UpdateQuery query : queries) {
bulkRequest.add(prepareUpdate(query));
}
BulkResponse bulkResponse = bulkRequest.execute().actionGet();
checkForBulkUpdateFailure(bulkRequest.execute().actionGet());
}
private void checkForBulkUpdateFailure(BulkResponse bulkResponse) {
if (bulkResponse.hasFailures()) {
Map<String, String> failedDocuments = new HashMap<>();
for (BulkItemResponse item : bulkResponse.getItems()) {
@ -691,32 +675,29 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(deleteQuery.getQuery()).withIndices(indexName)
.withTypes(typeName).withPageable(PageRequest.of(0, pageSize)).build();
String scrollId = scan(searchQuery, scrollTimeInMillis, true);
BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
List<String> ids = new ArrayList<>();
boolean hasRecords = true;
while (hasRecords) {
Page<String> page = scroll(scrollId, scrollTimeInMillis, new SearchResultMapper() {
@Override
public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable) {
List<String> result = new ArrayList<>();
for (SearchHit searchHit : response.getHits()) {
String id = searchHit.getId();
result.add(id);
}
if (result.size() > 0) {
return new AggregatedPageImpl<>((List<T>) result);
}
return null;
SearchResultMapper onlyIdResultMapper = new SearchResultMapper() {
@Override
public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable) {
List<String> result = new ArrayList<String>();
for (SearchHit searchHit : response.getHits().getHits()) {
String id = searchHit.getId();
result.add(id);
}
});
if (page != null && page.getContent().size() > 0) {
ids.addAll(page.getContent());
} else {
hasRecords = false;
if (result.size() > 0) {
return new AggregatedPageImpl<T>((List<T>) result, response.getScrollId());
}
return new AggregatedPageImpl<T>(Collections.EMPTY_LIST, response.getScrollId());
}
}
};
Page<String> scrolledResult = startScroll(scrollTimeInMillis, searchQuery, String.class, onlyIdResultMapper);
BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
List<String> ids = new ArrayList<String>();
do {
ids.addAll(scrolledResult.getContent());
scrolledResult = continueScroll(((ScrolledPage<T>)scrolledResult).getScrollId(), scrollTimeInMillis, String.class, onlyIdResultMapper);
} while(scrolledResult.getContent().size() != 0);
for (String id : ids) {
bulkRequestBuilder.add(client.prepareDelete(indexName, typeName, id));
@ -726,7 +707,7 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
bulkRequestBuilder.execute().actionGet();
}
clearScroll(scrollId);
clearScroll(((ScrolledPage<T>) scrolledResult).getScrollId());
}
@Override
@ -745,33 +726,13 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
delete(deleteQuery, clazz);
}
@Override
public String scan(CriteriaQuery criteriaQuery, long scrollTimeInMillis, boolean noFields) {
return doScan(prepareScan(criteriaQuery, scrollTimeInMillis, noFields), criteriaQuery);
}
@Override
public <T> String scan(CriteriaQuery criteriaQuery, long scrollTimeInMillis, boolean noFields, Class<T> clazz) {
return doScan(prepareScan(criteriaQuery, scrollTimeInMillis, noFields, clazz), criteriaQuery);
}
@Override
public String scan(SearchQuery searchQuery, long scrollTimeInMillis, boolean noFields) {
return doScan(prepareScan(searchQuery, scrollTimeInMillis, noFields), searchQuery);
}
@Override
public <T> String scan(SearchQuery searchQuery, long scrollTimeInMillis, boolean noFields, Class<T> clazz) {
return doScan(prepareScan(searchQuery, scrollTimeInMillis, noFields, clazz), searchQuery);
}
private <T> SearchRequestBuilder prepareScan(Query query, long scrollTimeInMillis, boolean noFields, Class<T> clazz) {
private <T> SearchRequestBuilder prepareScroll(Query query, long scrollTimeInMillis, Class<T> clazz) {
setPersistentEntityIndexAndType(query, clazz);
return prepareScan(query, scrollTimeInMillis, noFields);
return prepareScroll(query, scrollTimeInMillis);
}
private SearchRequestBuilder prepareScan(Query query, long scrollTimeInMillis, boolean noFields) {
SearchRequestBuilder requestBuilder = client.prepareSearch(toArray(query.getIndices())).setSearchType(SCAN)
private SearchRequestBuilder prepareScroll(Query query, long scrollTimeInMillis) {
SearchRequestBuilder requestBuilder = client.prepareSearch(toArray(query.getIndices()))
.setTypes(toArray(query.getTypes())).setScroll(TimeValue.timeValueMillis(scrollTimeInMillis)).setFrom(0);
if(query.getPageable().isPaged()){
@ -779,16 +740,12 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
}
if (!isEmpty(query.getFields())) {
requestBuilder.addFields(toArray(query.getFields()));
}
if (noFields) {
requestBuilder.setNoFields();
requestBuilder.setFetchSource(toArray(query.getFields()), null);
}
return requestBuilder;
}
private String doScan(SearchRequestBuilder requestBuilder, CriteriaQuery criteriaQuery) {
private SearchResponse doScroll(SearchRequestBuilder requestBuilder, CriteriaQuery criteriaQuery) {
Assert.notNull(criteriaQuery.getIndices(), "No index defined for Query");
Assert.notNull(criteriaQuery.getTypes(), "No type define for Query");
Assert.notNull(criteriaQuery.getPageable(), "Query.pageable is required for scan & scroll");
@ -807,10 +764,10 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
requestBuilder.setPostFilter(elasticsearchFilter);
}
return getSearchResponse(requestBuilder.execute()).getScrollId();
return getSearchResponse(requestBuilder.execute());
}
private String doScan(SearchRequestBuilder requestBuilder, SearchQuery searchQuery) {
private SearchResponse doScroll(SearchRequestBuilder requestBuilder, SearchQuery searchQuery) {
Assert.notNull(searchQuery.getIndices(), "No index defined for Query");
Assert.notNull(searchQuery.getTypes(), "No type define for Query");
Assert.notNull(searchQuery.getPageable(), "Query.pageable is required for scan & scroll");
@ -819,21 +776,39 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
requestBuilder.setPostFilter(searchQuery.getFilter());
}
return getSearchResponse(requestBuilder.setQuery(searchQuery.getQuery()).execute()).getScrollId();
return getSearchResponse(requestBuilder.setQuery(searchQuery.getQuery()).execute());
}
@Override
public <T> Page<T> scroll(String scrollId, long scrollTimeInMillis, Class<T> clazz) {
SearchResponse response = getSearchResponse(
client.prepareSearchScroll(scrollId).setScroll(TimeValue.timeValueMillis(scrollTimeInMillis)).execute());
public <T> Page<T> startScroll(long scrollTimeInMillis, SearchQuery searchQuery, Class<T> clazz) {
SearchResponse response = doScroll(prepareScroll(searchQuery, scrollTimeInMillis, clazz), searchQuery);
return resultsMapper.mapResults(response, clazz, null);
}
public <T> Page<T> startScroll(long scrollTimeInMillis, CriteriaQuery criteriaQuery, Class<T> clazz) {
SearchResponse response = doScroll(prepareScroll(criteriaQuery, scrollTimeInMillis, clazz), criteriaQuery);
return resultsMapper.mapResults(response, clazz, null);
}
public <T> Page<T> startScroll(long scrollTimeInMillis, SearchQuery searchQuery, Class<T> clazz, SearchResultMapper mapper) {
SearchResponse response = doScroll(prepareScroll(searchQuery, scrollTimeInMillis, clazz), searchQuery);
return mapper.mapResults(response, clazz, null);
}
public <T> Page<T> startScroll(long scrollTimeInMillis, CriteriaQuery criteriaQuery, Class<T> clazz, SearchResultMapper mapper) {
SearchResponse response = doScroll(prepareScroll(criteriaQuery, scrollTimeInMillis, clazz), criteriaQuery);
return mapper.mapResults(response, clazz, null);
}
public <T> Page<T> continueScroll(@Nullable String scrollId, long scrollTimeInMillis, Class<T> clazz) {
SearchResponse response = getSearchResponse(client.prepareSearchScroll(scrollId)
.setScroll(TimeValue.timeValueMillis(scrollTimeInMillis)).execute());
return resultsMapper.mapResults(response, clazz, Pageable.unpaged());
}
@Override
public <T> Page<T> scroll(String scrollId, long scrollTimeInMillis, SearchResultMapper mapper) {
SearchResponse response = getSearchResponse(
client.prepareSearchScroll(scrollId).setScroll(TimeValue.timeValueMillis(scrollTimeInMillis)).execute());
return mapper.mapResults(response, null, Pageable.unpaged());
public <T> Page<T> continueScroll(@Nullable String scrollId, long scrollTimeInMillis, Class<T> clazz, SearchResultMapper mapper) {
SearchResponse response = getSearchResponse(client.prepareSearchScroll(scrollId)
.setScroll(TimeValue.timeValueMillis(scrollTimeInMillis)).execute());
return mapper.mapResults(response, clazz, Pageable.unpaged());
}
@Override
@ -852,8 +827,8 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
Assert.notNull(type, "No 'type' defined for MoreLikeThisQuery");
Assert.notNull(query.getId(), "No document id defined for MoreLikeThisQuery");
MoreLikeThisQueryBuilder moreLikeThisQueryBuilder = moreLikeThisQuery()
.addLikeItem(new MoreLikeThisQueryBuilder.Item(indexName, type, query.getId()));
MoreLikeThisQueryBuilder moreLikeThisQueryBuilder = moreLikeThisQuery(toArray(new MoreLikeThisQueryBuilder.Item(indexName, type, query.getId())));
if (query.getMinTermFreq() != null) {
moreLikeThisQueryBuilder.minTermFreq(query.getMinTermFreq());
@ -895,7 +870,8 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
}
if (!searchQuery.getScriptFields().isEmpty()) {
searchRequest.addField("_source");
//_source should be return all the time
//searchRequest.addStoredField("_source");
for (ScriptField scriptedField : searchQuery.getScriptFields()) {
searchRequest.addScriptField(scriptedField.fieldName(), scriptedField.script());
}
@ -903,7 +879,7 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
if (searchQuery.getHighlightFields() != null) {
for (HighlightBuilder.Field highlightField : searchQuery.getHighlightFields()) {
searchRequest.addHighlightedField(highlightField);
searchRequest.highlighter(new HighlightBuilder().field(highlightField));
}
}
@ -1016,7 +992,7 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
searchRequestBuilder.setFrom(startRecord);
if (!query.getFields().isEmpty()) {
searchRequestBuilder.addFields(toArray(query.getFields()));
searchRequestBuilder.setFetchSource(toArray(query.getFields()),null);
}
if (query.getSort() != null) {
@ -1086,7 +1062,8 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
public Boolean addAlias(AliasQuery query) {
Assert.notNull(query.getIndexName(), "No index defined for Alias");
Assert.notNull(query.getAliasName(), "No alias defined");
AliasAction aliasAction = new AliasAction(ADD, query.getIndexName(), query.getAliasName());
final IndicesAliasesRequest.AliasActions aliasAction = IndicesAliasesRequest.AliasActions.add().alias(query.getAliasName()).index(query.getIndexName());
if (query.getFilterBuilder() != null) {
aliasAction.filter(query.getFilterBuilder());
} else if (query.getFilter() != null) {
@ -1135,7 +1112,7 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
ElasticsearchPersistentEntity<?> persistentEntity = getPersistentEntityFor(entity.getClass());
Optional<ElasticsearchPersistentProperty> idProperty = persistentEntity.getIdProperty();
// Only deal with String because ES generated Ids are strings !
// Only deal with text because ES generated Ids are strings !
idProperty.ifPresent(property -> {
@ -1190,6 +1167,10 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
return values.toArray(valuesAsArray);
}
private static MoreLikeThisQueryBuilder.Item[] toArray(MoreLikeThisQueryBuilder.Item... values) {
return values;
}
protected ResultsMapper getResultsMapper() {
return resultsMapper;
}
@ -1224,13 +1205,11 @@ public class ElasticsearchTemplate implements ElasticsearchOperations, Applicati
return stringBuilder.toString();
}
public SuggestResponse suggest(SuggestBuilder.SuggestionBuilder<?> suggestion, String... indices) {
SuggestRequestBuilder suggestRequestBuilder = client.prepareSuggest(indices);
suggestRequestBuilder.addSuggestion(suggestion);
return suggestRequestBuilder.execute().actionGet();
public SearchResponse suggest(SuggestBuilder suggestion, String... indices) {
return client.prepareSearch(indices).suggest(suggestion).get();
}
public SuggestResponse suggest(SuggestBuilder.SuggestionBuilder<?> suggestion, Class clazz) {
public SearchResponse suggest(SuggestBuilder suggestion, Class clazz) {
return suggest(suggestion, retrieveIndexNameFromPersistentEntity(clazz));
}
}
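
A sketch of calling the reworked suggest() above: prepareSuggest() and SuggestResponse are gone, so suggestions travel inside a normal search request built from a SuggestBuilder. The suggestion name, completion field, prefix and index are placeholders.

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.SuggestBuilders;
import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;

public class SuggestExample {

    public static void suggestTitles(ElasticsearchTemplate template) {
        SuggestBuilder suggestBuilder = new SuggestBuilder()
                .addSuggestion("title-suggest",                         // placeholder suggestion name
                        SuggestBuilders.completionSuggestion("suggest") // placeholder completion field
                                .prefix("spri")
                                .size(5));

        SearchResponse response = template.suggest(suggestBuilder, "articles"); // placeholder index

        CompletionSuggestion suggestion = response.getSuggest().getSuggestion("title-suggest");
        suggestion.getEntries().forEach(entry ->
                entry.getOptions().forEach(option -> System.out.println(option.getText().string())));
    }
}
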

FacetedPageImpl.java

@ -54,7 +54,7 @@ public abstract class FacetedPageImpl<T> extends PageImpl<T> implements FacetedP
}
public FacetedPageImpl(List<T> content, Pageable pageable, long total) {
super(content, pageable, total);
super(content, Pageable.unpaged(), total);
}
@Override

MappingBuilder.java

@ -51,6 +51,7 @@ import org.springframework.data.util.TypeInformation;
*/
class MappingBuilder {
public static final String FIELD_DATA = "fielddata";
public static final String FIELD_STORE = "store";
public static final String FIELD_TYPE = "type";
public static final String FIELD_INDEX = "index";
@ -60,13 +61,12 @@ class MappingBuilder {
public static final String FIELD_PROPERTIES = "properties";
public static final String FIELD_PARENT = "_parent";
public static final String COMPLETION_PAYLOADS = "payloads";
public static final String COMPLETION_PRESERVE_SEPARATORS = "preserve_separators";
public static final String COMPLETION_PRESERVE_POSITION_INCREMENTS = "preserve_position_increments";
public static final String COMPLETION_MAX_INPUT_LENGTH = "max_input_length";
public static final String INDEX_VALUE_NOT_ANALYZED = "not_analyzed";
public static final String TYPE_VALUE_STRING = "string";
public static final String TYPE_VALUE_STRING = "text";
public static final String TYPE_VALUE_GEO_POINT = "geo_point";
public static final String TYPE_VALUE_COMPLETION = "completion";
public static final String TYPE_VALUE_GEO_HASH_PREFIX = "geohash_prefix";
@ -192,18 +192,18 @@ class MappingBuilder {
xContentBuilder.field(FIELD_TYPE, TYPE_VALUE_GEO_POINT);
GeoPointField annotation = field.getAnnotation(GeoPointField.class);
if (annotation != null) {
if (annotation.geoHashPrefix()) {
xContentBuilder.field(TYPE_VALUE_GEO_HASH_PREFIX, true);
if (StringUtils.isNotEmpty(annotation.geoHashPrecision())) {
if (NumberUtils.isNumber(annotation.geoHashPrecision())) {
xContentBuilder.field(TYPE_VALUE_GEO_HASH_PRECISION, Integer.parseInt(annotation.geoHashPrecision()));
} else {
xContentBuilder.field(TYPE_VALUE_GEO_HASH_PRECISION, annotation.geoHashPrecision());
}
}
}
}
// if (annotation != null) {
// if (annotation.geoHashPrefix()) {
// xContentBuilder.field(TYPE_VALUE_GEO_HASH_PREFIX, true);
// if (StringUtils.isNotEmpty(annotation.geoHashPrecision())) {
// if (NumberUtils.isNumber(annotation.geoHashPrecision())) {
// xContentBuilder.field(TYPE_VALUE_GEO_HASH_PRECISION, Integer.parseInt(annotation.geoHashPrecision()));
// } else {
// xContentBuilder.field(TYPE_VALUE_GEO_HASH_PRECISION, annotation.geoHashPrecision());
// }
// }
// }
// }
xContentBuilder.endObject();
}
@ -213,7 +213,6 @@ class MappingBuilder {
xContentBuilder.field(FIELD_TYPE, TYPE_VALUE_COMPLETION);
if (annotation != null) {
xContentBuilder.field(COMPLETION_MAX_INPUT_LENGTH, annotation.maxInputLength());
xContentBuilder.field(COMPLETION_PAYLOADS, annotation.payloads());
xContentBuilder.field(COMPLETION_PRESERVE_POSITION_INCREMENTS, annotation.preservePositionIncrements());
xContentBuilder.field(COMPLETION_PRESERVE_SEPARATORS, annotation.preserveSeparators());
if (isNotBlank(annotation.searchAnalyzer())) {
@ -245,6 +244,10 @@ class MappingBuilder {
if(!nestedOrObjectField) {
xContentBuilder.field(FIELD_STORE, fieldAnnotation.store());
}
if(fieldAnnotation.fielddata()) {
xContentBuilder.field(FIELD_DATA, fieldAnnotation.fielddata());
}
if (FieldType.Auto != fieldAnnotation.type()) {
xContentBuilder.field(FIELD_TYPE, fieldAnnotation.type().name().toLowerCase());
if (FieldType.Date == fieldAnnotation.type() && DateFormat.none != fieldAnnotation.format()) {
@ -252,8 +255,8 @@ class MappingBuilder {
? fieldAnnotation.pattern() : fieldAnnotation.format());
}
}
if (FieldIndex.not_analyzed == fieldAnnotation.index() || FieldIndex.no == fieldAnnotation.index()) {
xContentBuilder.field(FIELD_INDEX, fieldAnnotation.index().name().toLowerCase());
if(!fieldAnnotation.index()) {
xContentBuilder.field(FIELD_INDEX, fieldAnnotation.index());
}
if (isNotBlank(fieldAnnotation.searchAnalyzer())) {
xContentBuilder.field(FIELD_SEARCH_ANALYZER, fieldAnnotation.searchAnalyzer());
@ -276,8 +279,8 @@ class MappingBuilder {
if (FieldType.Auto != annotation.type()) {
builder.field(FIELD_TYPE, annotation.type().name().toLowerCase());
}
if (FieldIndex.not_analyzed == annotation.index()) {
builder.field(FIELD_INDEX, annotation.index().name().toLowerCase());
if(!annotation.index()) {
builder.field(FIELD_INDEX, annotation.index());
}
if (isNotBlank(annotation.searchAnalyzer())) {
builder.field(FIELD_SEARCH_ANALYZER, annotation.searchAnalyzer());
@ -285,6 +288,9 @@ class MappingBuilder {
if (isNotBlank(annotation.indexAnalyzer())) {
builder.field(FIELD_INDEX_ANALYZER, annotation.indexAnalyzer());
}
if (annotation.fielddata()) {
builder.field(FIELD_DATA, annotation.fielddata());
}
builder.endObject();
}
@ -296,10 +302,10 @@ class MappingBuilder {
private static void addMultiFieldMapping(XContentBuilder builder, java.lang.reflect.Field field,
MultiField annotation, boolean nestedOrObjectField) throws IOException {
builder.startObject(field.getName());
builder.field(FIELD_TYPE, "multi_field");
builder.field(FIELD_TYPE, annotation.mainField().type());
builder.startObject("fields");
//add standard field
addSingleFieldMapping(builder, field, annotation.mainField(),nestedOrObjectField);
//addSingleFieldMapping(builder, field, annotation.mainField(), nestedOrObjectField);
for (InnerField innerField : annotation.otherFields()) {
addNestedFieldMapping(builder, field, innerField);
}
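
To make the multi-field change concrete: the wrapper type "multi_field" no longer exists in 5.x, so the builder now writes the main field's own type and nests the inner fields under "fields". A hypothetical property using the updated annotations, assuming the usual suffix attribute on @InnerField; names and values are made up.

import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;
import org.springframework.data.elasticsearch.annotations.InnerField;
import org.springframework.data.elasticsearch.annotations.MultiField;

public class MultiFieldExample {

    // Expected mapping shape: "name": { "type": "text", "fields": { "raw": { "type": "keyword" } } }
    @MultiField(
            mainField = @Field(type = FieldType.text),
            otherFields = {
                    @InnerField(suffix = "raw", type = FieldType.keyword)
            }
    )
    private String name;
}
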

ScrolledPage.java

@ -0,0 +1,13 @@
package org.springframework.data.elasticsearch.core;
import org.springframework.data.domain.Page;
/**
* @author Artur Konczak
*/
public interface ScrolledPage<T> extends Page<T> {
String getScrollId();
}
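
ScrolledPage is what the 5.x template hands back from startScroll()/continueScroll() (both exercised in the template tests further down). A minimal consumption loop, assuming an ElasticsearchTemplate, a SearchQuery and the SampleEntity test entity are already set up, might look like:

import java.util.ArrayList;
import java.util.List;

import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
import org.springframework.data.elasticsearch.core.ScrolledPage;
import org.springframework.data.elasticsearch.core.query.SearchQuery;
import org.springframework.data.elasticsearch.entities.SampleEntity;

public class ScrollLoopSketch {

    // drains a scrolled search into a list; template and query are assumed to exist elsewhere
    static List<SampleEntity> drain(ElasticsearchTemplate template, SearchQuery query) {
        List<SampleEntity> all = new ArrayList<>();
        ScrolledPage<SampleEntity> page =
                (ScrolledPage<SampleEntity>) template.startScroll(1000, query, SampleEntity.class);
        while (page.hasContent()) {
            all.addAll(page.getContent());
            page = (ScrolledPage<SampleEntity>) template.continueScroll(page.getScrollId(), 1000, SampleEntity.class);
        }
        template.clearScroll(page.getScrollId()); // release the server-side scroll context
        return all;
    }
}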

View File

@ -3,11 +3,12 @@ package org.springframework.data.elasticsearch.core.aggregation;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.Aggregations;
import org.springframework.data.elasticsearch.core.FacetedPage;
import org.springframework.data.elasticsearch.core.ScrolledPage;
/**
* @author Petar Tahchiev
*/
public interface AggregatedPage<T> extends FacetedPage<T> {
public interface AggregatedPage<T> extends FacetedPage<T>, ScrolledPage<T> {
boolean hasAggregations();

View File

@ -34,15 +34,26 @@ public class AggregatedPageImpl<T> extends FacetedPageImpl<T> implements Aggrega
private Aggregations aggregations;
private Map<String, Aggregation> mapOfAggregations = new HashMap<>();
private String scrollId;
public AggregatedPageImpl(List<T> content) {
super(content);
}
public AggregatedPageImpl(List<T> content, String scrollId) {
super(content);
this.scrollId = scrollId;
}
public AggregatedPageImpl(List<T> content, Pageable pageable, long total) {
super(content, pageable, total);
}
public AggregatedPageImpl(List<T> content, Pageable pageable, long total, String scrollId) {
super(content, pageable, total);
this.scrollId = scrollId;
}
public AggregatedPageImpl(List<T> content, Pageable pageable, long total, Aggregations aggregations) {
super(content, pageable, total);
this.aggregations = aggregations;
@ -53,6 +64,17 @@ public class AggregatedPageImpl<T> extends FacetedPageImpl<T> implements Aggrega
}
}
public AggregatedPageImpl(List<T> content, Pageable pageable, long total, Aggregations aggregations, String scrollId) {
super(content, pageable, total);
this.aggregations = aggregations;
this.scrollId = scrollId;
if (aggregations != null) {
for (Aggregation aggregation : aggregations) {
mapOfAggregations.put(aggregation.getName(), aggregation);
}
}
}
@Override
public boolean hasAggregations() {
return aggregations != null && mapOfAggregations.size() > 0;
@ -67,4 +89,9 @@ public class AggregatedPageImpl<T> extends FacetedPageImpl<T> implements Aggrega
public Aggregation getAggregation(String name) {
return aggregations == null ? null : aggregations.get(name);
}
@Override
public String getScrollId() {
return scrollId;
}
}
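
The new scroll-id constructors exist so that a custom SearchResultMapper can return the id together with the mapped hits, which is the pattern the template tests below rely on. A compressed sketch, with the hit-to-entity conversion left as a hypothetical helper:

import java.util.ArrayList;
import java.util.List;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.SearchHit;
import org.springframework.data.domain.Pageable;
import org.springframework.data.elasticsearch.core.SearchResultMapper;
import org.springframework.data.elasticsearch.core.aggregation.AggregatedPage;
import org.springframework.data.elasticsearch.core.aggregation.impl.AggregatedPageImpl;

public class ScrollAwareMapperSketch implements SearchResultMapper {

    @Override
    public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable) {
        List<T> content = new ArrayList<>();
        for (SearchHit hit : response.getHits()) {
            content.add(toEntity(hit, clazz)); // hypothetical hit-to-entity conversion
        }
        // carrying the scroll id lets continueScroll() pick up where this page ended
        return new AggregatedPageImpl<>(content, response.getScrollId());
    }

    private <T> T toEntity(SearchHit hit, Class<T> clazz) {
        return null; // placeholder - the real conversion depends on the entity and mapper in use
    }
}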

View File

@ -11,9 +11,7 @@ import com.fasterxml.jackson.annotation.JsonInclude;
public class Completion {
private String[] input;
private String output;
private Integer weight;
private Object payload;
private Completion() {
//required by mapper to instantiate object
@ -31,22 +29,6 @@ public class Completion {
this.input = input;
}
public String getOutput() {
return output;
}
public void setOutput(String output) {
this.output = output;
}
public Object getPayload() {
return payload;
}
public void setPayload(Object payload) {
this.payload = payload;
}
public Integer getWeight() {
return weight;
}
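
With output and payload gone from the 5.x completion suggester, a suggestion now carries only its inputs and an optional weight. A minimal sketch - the input values are made up and the import path is assumed from the project layout:

import org.springframework.data.elasticsearch.core.completion.Completion;

public class CompletionSketch {

    static Completion newSuggest() {
        Completion suggest = new Completion(new String[] { "spring", "data", "elasticsearch" });
        suggest.setWeight(10); // optional; output and payload are no longer supported
        return suggest;
    }
}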

View File

@ -19,7 +19,7 @@ package org.springframework.data.elasticsearch.core.facet.request;
import org.apache.commons.lang.StringUtils;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.springframework.data.elasticsearch.core.facet.AbstractFacetRequest;
import org.springframework.util.Assert;
@ -57,16 +57,16 @@ public class HistogramFacetRequest extends AbstractFacetRequest {
Assert.isTrue(StringUtils.isNotBlank(field), "Please select a field on which to build the facet!");
Assert.isTrue(interval > 0, "Please provide the interval as a positive value greater than zero!");
DateHistogramBuilder dateHistogramBuilder = AggregationBuilders.dateHistogram(getName());
DateHistogramAggregationBuilder dateHistogramBuilder = AggregationBuilders.dateHistogram(getName());
dateHistogramBuilder.field(field);
if (timeUnit != null) {
dateHistogramBuilder.interval(timeUnit);
dateHistogramBuilder.dateHistogramInterval(timeUnit);
} else {
dateHistogramBuilder.interval(interval);
}
dateHistogramBuilder.subAggregation(AggregationBuilders.extendedStats(INTERNAL_STATS));
dateHistogramBuilder.subAggregation(AggregationBuilders.extendedStats(INTERNAL_STATS).field(field));
return dateHistogramBuilder;
}
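
DateHistogramBuilder no longer exists in 5.x; the facet above is rebuilt on DateHistogramAggregationBuilder, where calendar intervals go through dateHistogramInterval(). A standalone sketch of the same call sequence, with the aggregation name and field as placeholders:

import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

public class DateHistogramSketch {

    static DateHistogramAggregationBuilder perDay(String name, String field) {
        DateHistogramAggregationBuilder histogram = AggregationBuilders.dateHistogram(name);
        histogram.field(field);
        histogram.dateHistogramInterval(DateHistogramInterval.DAY); // calendar interval replaces interval(timeUnit)
        histogram.subAggregation(AggregationBuilders.extendedStats("internal_stats").field(field));
        return histogram;
    }
}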

View File

@ -22,7 +22,7 @@ import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.range.RangeBuilder;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;
import org.springframework.data.elasticsearch.core.facet.AbstractFacetRequest;
import org.springframework.util.Assert;
@ -76,15 +76,16 @@ public class RangeFacetRequest extends AbstractFacetRequest {
public AbstractAggregationBuilder getFacet() {
Assert.notNull(getName(), "Facet name can't be null!");
RangeBuilder rangeBuilder = AggregationBuilders.range(getName());
rangeBuilder.field(StringUtils.isNotBlank(keyField) ? keyField : field );
RangeAggregationBuilder rangeBuilder = AggregationBuilders.range(getName());
final String field = StringUtils.isNotBlank(keyField) ? keyField : this.field;
rangeBuilder.field(field);
for (Entry entry : entries) {
DoubleEntry doubleEntry = (DoubleEntry) entry;
rangeBuilder.addRange(validateValue(doubleEntry.getFrom(), Double.NEGATIVE_INFINITY), validateValue(doubleEntry.getTo(), Double.POSITIVE_INFINITY));
}
rangeBuilder.subAggregation(AggregationBuilders.extendedStats(INTERNAL_STATS));
rangeBuilder.subAggregation(AggregationBuilders.extendedStats(INTERNAL_STATS).field(field));
if(StringUtils.isNotBlank(valueField)){
rangeBuilder.subAggregation(AggregationBuilders.sum(RANGE_INTERNAL_SUM).field(valueField));
}

View File

@ -18,10 +18,12 @@ package org.springframework.data.elasticsearch.core.facet.request;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.springframework.data.elasticsearch.core.facet.AbstractFacetRequest;
import org.springframework.util.Assert;
@ -75,7 +77,7 @@ public class TermFacetRequest extends AbstractFacetRequest {
@Override
public AbstractAggregationBuilder getFacet() {
Assert.notEmpty(fields, "Please select at least one field!");
TermsBuilder termsBuilder = AggregationBuilders.terms(getName()).field(fields[0]).size(this.size);
final TermsAggregationBuilder termsBuilder = AggregationBuilders.terms(getName()).field(fields[0]).size(this.size);
switch (order) {
case descTerm:
@ -91,7 +93,7 @@ public class TermFacetRequest extends AbstractFacetRequest {
termsBuilder.order(Terms.Order.count(true));
}
if (ArrayUtils.isNotEmpty(excludeTerms)) {
termsBuilder.exclude(excludeTerms);
termsBuilder.includeExclude(new IncludeExclude(null,excludeTerms));
}
if (allTerms) {
@ -99,7 +101,7 @@ public class TermFacetRequest extends AbstractFacetRequest {
}
if (StringUtils.isNotBlank(regex)) {
termsBuilder.include(regex);
termsBuilder.includeExclude(new IncludeExclude(new RegExp(regex),null));
}
return termsBuilder;
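
The separate include()/exclude() setters on the old TermsBuilder are folded into a single IncludeExclude value in 5.x, as the rewrite above shows. A standalone sketch (aggregation name, field and terms are placeholders):

import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;

public class TermsIncludeExcludeSketch {

    static TermsAggregationBuilder subjects(String[] excludeTerms) {
        TermsAggregationBuilder terms = AggregationBuilders.terms("subjects").field("subject");
        // exact terms to drop; a regex include would instead be new IncludeExclude(new RegExp("spr.*"), null)
        terms.includeExclude(new IncludeExclude(null, excludeTerms));
        return terms;
    }
}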

View File

@ -1,10 +1,8 @@
package org.springframework.data.elasticsearch.core.geo;
import java.io.IOException;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.Version;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
@ -32,39 +30,15 @@ public class CustomGeoModule extends SimpleModule {
}
class PointSerializer extends JsonSerializer<Point> {
@Override
public void serialize(Point value, JsonGenerator gen, SerializerProvider serializers) throws IOException, JsonProcessingException {
public void serialize(Point value, JsonGenerator gen, SerializerProvider serializers) throws IOException {
gen.writeObject(GeoPoint.fromPoint(value));
// gen.writeStartObject();
// gen.writeNumberField("lat", value.getY());
// gen.writeNumberField("lon", value.getX());
// gen.writeEndObject();
}
}
class PointDeserializer extends JsonDeserializer<Point> {
@Override
public Point deserialize(JsonParser p, DeserializationContext context) throws IOException, JsonProcessingException {
GeoPoint point = p.readValueAs(GeoPoint.class);
// Double lat = null;
// Double lon = null;
// //skipp field name
// p.nextFieldName();
// if ("lat".equals(p.getCurrentName())) {
// //get value
// p.nextFieldName();
// lat = p.getDoubleValue();
// p.nextFieldName();
// }
// if ("lon".equals(p.getCurrentName())) {
// //get value
// p.nextFieldName();
// lon = p.getDoubleValue();
// }
// return new Point(lon, lat);
return GeoPoint.toPoint(point);
public Point deserialize(JsonParser p, DeserializationContext context) throws IOException {
return GeoPoint.toPoint(p.readValueAs(GeoPoint.class));
}
}
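
The trimmed-down serializers simply round-trip a Point through GeoPoint. Wiring the module into a plain Jackson ObjectMapper outside of Spring would look roughly like this, assuming the Point in question is Spring Data's org.springframework.data.geo.Point and with arbitrary coordinates:

import com.fasterxml.jackson.databind.ObjectMapper;

import org.springframework.data.elasticsearch.core.geo.CustomGeoModule;
import org.springframework.data.geo.Point;

public class GeoModuleSketch {

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        mapper.registerModule(new CustomGeoModule());

        String json = mapper.writeValueAsString(new Point(13.4, 52.5)); // x = lon, y = lat
        Point back = mapper.readValue(json, Point.class);               // read back through GeoPoint
        System.out.println(json + " -> " + back);
    }
}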

View File

@ -20,7 +20,7 @@ import java.util.List;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.springframework.data.elasticsearch.core.facet.FacetRequest;

View File

@ -22,7 +22,7 @@ import java.util.List;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.springframework.data.domain.Pageable;
import org.springframework.data.elasticsearch.core.facet.FacetRequest;

View File

@ -17,7 +17,6 @@ package org.springframework.data.elasticsearch.core.query;
import java.util.Collection;
import java.util.List;
import org.elasticsearch.action.search.SearchType;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
@ -43,13 +42,6 @@ public interface Query {
*/
<T extends Query> T setPageable(Pageable pageable);
/**
* Get filter queries if defined
*
* @return
*/
// List<FilterQuery> getFilterQueries();
/**
* Get page settings if defined
*

View File

@ -19,7 +19,7 @@ import java.util.List;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.springframework.data.elasticsearch.core.facet.FacetRequest;

View File

@ -48,7 +48,7 @@ public @interface EnableElasticsearchRepositories {
/**
* Base packages to scan for annotated components. {@link #value()} is an alias for (and mutually exclusive with) this
* attribute. Use {@link #basePackageClasses()} for a type-safe alternative to String-based package names.
* attribute. Use {@link #basePackageClasses()} for a type-safe alternative to String-based package names.
*/
String[] basePackages() default {};

View File

@ -61,7 +61,8 @@ public class ElasticsearchPartQuery extends AbstractElasticsearchRepositoryQuery
} else if (queryMethod.isStreamQuery()) {
Class<?> entityType = queryMethod.getEntityInformation().getJavaType();
if (query.getPageable().isUnpaged()) {
query.setPageable(PageRequest.of(0, 20));
int itemCount = (int) elasticsearchOperations.count(query, queryMethod.getEntityInformation().getJavaType());
query.setPageable(PageRequest.of(0, Math.max(1, itemCount)));
}
return StreamUtils.createStreamFromIterator((CloseableIterator<Object>) elasticsearchOperations.stream(query, entityType));
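
For stream queries the page size is now derived from a count query instead of the former fixed 20, so the whole result set can be pulled in a single scrolled pass. A repository method that would take this code path could look like the following; the method name and derivation are illustrative only:

import java.util.stream.Stream;

import org.springframework.data.elasticsearch.entities.SampleEntity;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;

interface StreamingSampleRepository extends ElasticsearchRepository<SampleEntity, String> {

    // returning Stream makes queryMethod.isStreamQuery() true, so the count-based page size above applies
    Stream<SampleEntity> readByMessage(String message);
}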

View File

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<Configuration status="WARN">
<Appenders>
<Console name="Console" target="SYSTEM_OUT">
<PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
</Console>
</Appenders>
<Loggers>
<Root level="error">
<AppenderRef ref="Console"/>
</Root>
</Loggers>
</Configuration>

View File

@ -20,7 +20,9 @@ import static org.apache.commons.lang.RandomStringUtils.*;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
@ -37,7 +39,7 @@ import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:/repository-test-nested-object.xml")
@ContextConfiguration("classpath:/repository-test-nested-object-books.xml")
public class InnerObjectTests {
@Autowired private SampleElasticSearchBookRepository bookRepository;

View File

@ -28,6 +28,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.junit.Before;
@ -129,7 +130,7 @@ public class NestedObjectTests {
elasticsearchTemplate.bulkIndex(indexQueries);
elasticsearchTemplate.refresh(Person.class);
final QueryBuilder builder = nestedQuery("car", boolQuery().must(termQuery("car.name", "saturn")).must(termQuery("car.model", "imprezza")));
final QueryBuilder builder = nestedQuery("car", boolQuery().must(termQuery("car.name", "saturn")).must(termQuery("car.model", "imprezza")), ScoreMode.None);
final SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(builder).build();
final List<Person> persons = elasticsearchTemplate.queryForList(searchQuery, Person.class);
@ -186,8 +187,8 @@ public class NestedObjectTests {
//then
final BoolQueryBuilder builder = boolQuery();
builder.must(nestedQuery("girlFriends", termQuery("girlFriends.type", "temp")))
.must(nestedQuery("girlFriends.cars", termQuery("girlFriends.cars.name", "Ford".toLowerCase())));
builder.must(nestedQuery("girlFriends", termQuery("girlFriends.type", "temp"),ScoreMode.None))
.must(nestedQuery("girlFriends.cars", termQuery("girlFriends.cars.name", "Ford".toLowerCase()),ScoreMode.None));
final SearchQuery searchQuery = new NativeSearchQueryBuilder()
.withQuery(builder)
@ -325,7 +326,7 @@ public class NestedObjectTests {
elasticsearchTemplate.bulkIndex(indexQueries);
elasticsearchTemplate.refresh(Person.class);
final QueryBuilder builder = nestedQuery("books", boolQuery().must(termQuery("books.name", "java")));
final QueryBuilder builder = nestedQuery("books", boolQuery().must(termQuery("books.name", "java")), ScoreMode.None);
final SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(builder).build();
final List<Person> persons = elasticsearchTemplate.queryForList(searchQuery, Person.class);
@ -373,7 +374,7 @@ public class NestedObjectTests {
elasticsearchTemplate.refresh(Book.class);
//then
final SearchQuery searchQuery = new NativeSearchQueryBuilder()
.withQuery(nestedQuery("buckets", termQuery("buckets.1", "test3")))
.withQuery(nestedQuery("buckets", termQuery("buckets.1", "test3"),ScoreMode.None))
.build();
final Page<Book> books = elasticsearchTemplate.queryForPage(searchQuery, Book.class);
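
The recurring change in these tests is that nestedQuery() (and hasChildQuery() further down) now requires an explicit ScoreMode. A standalone version of the first query above, with ScoreMode.None preserving the old filter-only behaviour:

import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.nestedQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;

import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.index.query.QueryBuilder;

public class NestedQuerySketch {

    static QueryBuilder carByNameAndModel() {
        return nestedQuery("car",
                boolQuery()
                        .must(termQuery("car.name", "saturn"))
                        .must(termQuery("car.model", "imprezza")),
                ScoreMode.None); // scoring mode is now mandatory; None matches the old filter-only behaviour
    }
}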

View File

@ -1,139 +0,0 @@
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.elasticsearch;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Version;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.ScriptedField;
/**
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@Document(indexName = "test-index", type = "test-type", shards = 1, replicas = 0, refreshInterval = "-1")
public class SampleEntity {
@Id
private String id;
private String type;
private String message;
private int rate;
@ScriptedField
private Long scriptedRate;
private boolean available;
private String highlightedMessage;
@Version
private Long version;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
public int getRate() {
return rate;
}
public void setRate(int rate) {
this.rate = rate;
}
public Long getScriptedRate() {
return scriptedRate;
}
public void setScriptedRate(Long scriptedRate) {
this.scriptedRate = scriptedRate;
}
public boolean isAvailable() {
return available;
}
public void setAvailable(boolean available) {
this.available = available;
}
public String getHighlightedMessage() {
return highlightedMessage;
}
public void setHighlightedMessage(String highlightedMessage) {
this.highlightedMessage = highlightedMessage;
}
public Long getVersion() {
return version;
}
public void setVersion(Long version) {
this.version = version;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof SampleEntity)) {
return false;
}
if (this == obj) {
return true;
}
SampleEntity rhs = (SampleEntity) obj;
return new EqualsBuilder().append(this.id, rhs.id).append(this.type, rhs.type).append(this.message, rhs.message)
.append(this.rate, rhs.rate).append(this.available, rhs.available).append(this.version, rhs.version).isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder().append(id).append(type).append(message).append(rate).append(available).append(version)
.toHashCode();
}
@Override
public String toString() {
return "SampleEntity{" +
"id='" + id + '\'' +
", type='" + type + '\'' +
", message='" + message + '\'' +
", rate=" + rate +
", available=" + available +
", highlightedMessage='" + highlightedMessage + '\'' +
", version=" + version +
'}';
}
}

View File

@ -1,140 +0,0 @@
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.elasticsearch;
import java.util.UUID;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Version;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.ScriptedField;
/**
* @author Gad Akuka
*/
@Document(indexName = "test-index", type = "test-type", indexStoreType = "memory", shards = 1, replicas = 0, refreshInterval = "-1")
public class SampleEntityUUIDKeyed {
@Id
private UUID id;
private String type;
private String message;
private int rate;
@ScriptedField
private Long scriptedRate;
private boolean available;
private String highlightedMessage;
@Version
private Long version;
public UUID getId() {
return id;
}
public void setId(UUID id) {
this.id = id;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
public int getRate() {
return rate;
}
public void setRate(int rate) {
this.rate = rate;
}
public Long getScriptedRate() {
return scriptedRate;
}
public void setScriptedRate(Long scriptedRate) {
this.scriptedRate = scriptedRate;
}
public boolean isAvailable() {
return available;
}
public void setAvailable(boolean available) {
this.available = available;
}
public String getHighlightedMessage() {
return highlightedMessage;
}
public void setHighlightedMessage(String highlightedMessage) {
this.highlightedMessage = highlightedMessage;
}
public Long getVersion() {
return version;
}
public void setVersion(Long version) {
this.version = version;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof SampleEntityUUIDKeyed)) {
return false;
}
if (this == obj) {
return true;
}
SampleEntityUUIDKeyed rhs = (SampleEntityUUIDKeyed) obj;
return new EqualsBuilder().append(this.id, rhs.id).append(this.type, rhs.type).append(this.message, rhs.message)
.append(this.rate, rhs.rate).append(this.available, rhs.available).append(this.version, rhs.version).isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder().append(id).append(type).append(message).append(rate).append(available).append(version)
.toHashCode();
}
@Override
public String toString() {
return "SampleEntity{" +
"id='" + id + '\'' +
", type='" + type + '\'' +
", message='" + message + '\'' +
", rate=" + rate +
", available=" + available +
", highlightedMessage='" + highlightedMessage + '\'' +
", version=" + version +
'}';
}
}

View File

@ -15,24 +15,36 @@
*/
package org.springframework.data.elasticsearch;
import static org.elasticsearch.node.NodeBuilder.*;
import java.util.UUID;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.NodeValidationException;
import org.elasticsearch.transport.Netty4Plugin;
import org.springframework.data.elasticsearch.client.NodeClientFactoryBean;
import static java.util.Arrays.*;
/**
* @author Mohsin Husen
* @author Artur Konczak
*/
public class Utils {
public static NodeClient getNodeClient() {
return (NodeClient) nodeBuilder().settings(Settings.builder()
.put("http.enabled", "false")
.put("path.data", "target/elasticsearchTestData")
.put("path.home", "src/test/resources/test-home-dir"))
.clusterName(UUID.randomUUID().toString()).local(true).node()
.client();
public static Client getNodeClient() throws NodeValidationException {
String pathHome = "src/test/resources/test-home-dir";
String pathData = "target/elasticsearchTestData";
String clusterName = UUID.randomUUID().toString();
return new NodeClientFactoryBean.TestNode(
Settings.builder()
.put("transport.type", "netty4")
.put("transport.type", "local")
.put("http.type", "netty4")
.put("path.home", pathHome)
.put("path.data", pathData)
.put("cluster.name", clusterName)
.put("node.max_local_storage_nodes", 100)
.put("script.inline", "true")
.build(), asList(Netty4Plugin.class)).start().client();
}
}

View File

@ -23,7 +23,6 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.data.elasticsearch.client.NodeClientFactoryBean;
import org.springframework.data.elasticsearch.client.TransportClientFactoryBean;
import org.springframework.data.elasticsearch.repositories.sample.SampleElasticsearchRepository;
import org.springframework.test.context.ContextConfiguration;
@ -41,12 +40,6 @@ public class ElasticsearchNamespaceHandlerTests {
@Autowired
private ApplicationContext context;
@Test
public void shouldCreatesNodeClient() {
assertThat(context.getBean(NodeClientFactoryBean.class), is(notNullValue()));
assertThat(context.getBean(NodeClientFactoryBean.class), is(instanceOf(NodeClientFactoryBean.class)));
}
@Test
public void shouldCreateTransportClient() {
assertThat(context.getBean(TransportClientFactoryBean.class), is(notNullValue()));

View File

@ -20,6 +20,7 @@ import static org.junit.Assert.*;
import java.util.Arrays;
import org.elasticsearch.node.NodeValidationException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.BeansException;
@ -62,7 +63,7 @@ public class EnableElasticsearchRepositoriesTests implements ApplicationContextA
static class Config {
@Bean
public ElasticsearchOperations elasticsearchTemplate() {
public ElasticsearchOperations elasticsearchTemplate() throws NodeValidationException {
return new ElasticsearchTemplate(Utils.getNodeClient());
}
}

View File

@ -17,6 +17,7 @@ package org.springframework.data.elasticsearch.config;
import static org.junit.Assert.*;
import org.elasticsearch.node.NodeValidationException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
@ -44,7 +45,7 @@ public class EnableNestedElasticsearchRepositoriesTests {
static class Config {
@Bean
public ElasticsearchOperations elasticsearchTemplate() {
public ElasticsearchOperations elasticsearchTemplate() throws NodeValidationException {
return new ElasticsearchTemplate(Utils.getNodeClient());
}
}

View File

@ -29,13 +29,13 @@ public class CustomEntityMapper implements EntityMapper {
@Override
public String mapToString(Object object) throws IOException {
//mapping Object to String
//mapping Object to String
return null;
}
@Override
public <T> T mapToObject(String source, Class<T> clazz) throws IOException {
//mapping String to Object
//mapping String to Object
return null;
}
}

View File

@ -195,7 +195,7 @@ public class DefaultResultMapperTests {
return result;
}
@Document(indexName = "someIndex")
@Document(indexName = "test-index-immutable-internal")
@NoArgsConstructor(force = true)
@Getter
static class ImmutableEntity {

View File

@ -22,6 +22,7 @@ import static org.junit.Assert.*;
import java.util.List;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.action.RoutingMissingException;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
@ -80,7 +81,7 @@ public class ElasticsearchTemplateParentChildTests {
elasticsearchTemplate.refresh(ChildEntity.class);
// find all parents that have the first child
QueryBuilder query = hasChildQuery(ParentEntity.CHILD_TYPE, QueryBuilders.termQuery("name", child1name.toLowerCase()));
QueryBuilder query = hasChildQuery(ParentEntity.CHILD_TYPE, QueryBuilders.termQuery("name", child1name.toLowerCase()), ScoreMode.None);
List<ParentEntity> parents = elasticsearchTemplate.queryForList(new NativeSearchQuery(query), ParentEntity.class);
// we're expecting only the first parent as result

View File

@ -15,20 +15,14 @@
*/
package org.springframework.data.elasticsearch.core;
import static org.apache.commons.lang.RandomStringUtils.*;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import static org.springframework.data.elasticsearch.utils.IndexBuilder.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.commons.lang.StringUtils;
import org.elasticsearch.action.get.MultiGetItemResponse;
import org.elasticsearch.action.get.MultiGetResponse;
@ -36,14 +30,13 @@ import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.index.engine.DocumentMissingException;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
@ -55,20 +48,7 @@ import org.springframework.data.elasticsearch.ElasticsearchException;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.core.aggregation.AggregatedPage;
import org.springframework.data.elasticsearch.core.aggregation.impl.AggregatedPageImpl;
import org.springframework.data.elasticsearch.core.query.Criteria;
import org.springframework.data.elasticsearch.core.query.CriteriaQuery;
import org.springframework.data.elasticsearch.core.query.DeleteQuery;
import org.springframework.data.elasticsearch.core.query.FetchSourceFilterBuilder;
import org.springframework.data.elasticsearch.core.query.GetQuery;
import org.springframework.data.elasticsearch.core.query.IndexQuery;
import org.springframework.data.elasticsearch.core.query.IndexQueryBuilder;
import org.springframework.data.elasticsearch.core.query.MoreLikeThisQuery;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.elasticsearch.core.query.ScriptField;
import org.springframework.data.elasticsearch.core.query.SearchQuery;
import org.springframework.data.elasticsearch.core.query.StringQuery;
import org.springframework.data.elasticsearch.core.query.UpdateQuery;
import org.springframework.data.elasticsearch.core.query.UpdateQueryBuilder;
import org.springframework.data.elasticsearch.core.query.*;
import org.springframework.data.elasticsearch.entities.HetroEntity1;
import org.springframework.data.elasticsearch.entities.HetroEntity2;
import org.springframework.data.elasticsearch.entities.SampleEntity;
@ -77,6 +57,11 @@ import org.springframework.data.elasticsearch.entities.UseServerConfigurationEnt
import org.springframework.data.util.CloseableIterator;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import static org.apache.commons.lang.RandomStringUtils.*;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import static org.springframework.data.elasticsearch.utils.IndexBuilder.*;
/**
* @author Rizwan Idrees
@ -90,7 +75,7 @@ import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
@ContextConfiguration("classpath:elasticsearch-template-test.xml")
public class ElasticsearchTemplateTests {
private static final String INDEX_NAME = "test-index";
private static final String INDEX_NAME = "test-index-sample";
private static final String INDEX_1_NAME = "test-index-1";
private static final String INDEX_2_NAME = "test-index-2";
private static final String TYPE_NAME = "test-type";
@ -102,6 +87,7 @@ public class ElasticsearchTemplateTests {
public void before() {
elasticsearchTemplate.deleteIndex(SampleEntity.class);
elasticsearchTemplate.createIndex(SampleEntity.class);
elasticsearchTemplate.putMapping(SampleEntity.class);
elasticsearchTemplate.deleteIndex(INDEX_1_NAME);
elasticsearchTemplate.deleteIndex(INDEX_2_NAME);
elasticsearchTemplate.deleteIndex(UseServerConfigurationEntity.class);
@ -225,8 +211,8 @@ public class ElasticsearchTemplateTests {
for (MultiGetItemResponse response : responses.getResponses()) {
SampleEntity entity = new SampleEntity();
entity.setId(response.getResponse().getId());
entity.setMessage((String) response.getResponse().getField("message").getValue());
entity.setType((String) response.getResponse().getField("type").getValue());
entity.setMessage((String) response.getResponse().getSource().get("message"));
entity.setType((String) response.getResponse().getSource().get("type"));
list.add((T) entity);
}
return list;
@ -426,7 +412,7 @@ public class ElasticsearchTemplateTests {
elasticsearchTemplate.refresh(SampleEntity.class);
SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(matchAllQuery())
.withSort(new FieldSortBuilder("rate").ignoreUnmapped(true).order(SortOrder.ASC)).build();
.withSort(new FieldSortBuilder("rate").order(SortOrder.ASC)).build();
// when
Page<SampleEntity> sampleEntities = elasticsearchTemplate.queryForPage(searchQuery, SampleEntity.class);
// then
@ -465,8 +451,8 @@ public class ElasticsearchTemplateTests {
elasticsearchTemplate.refresh(SampleEntity.class);
SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(matchAllQuery())
.withSort(new FieldSortBuilder("rate").ignoreUnmapped(true).order(SortOrder.ASC))
.withSort(new FieldSortBuilder("message").ignoreUnmapped(true).order(SortOrder.ASC)).build();
.withSort(new FieldSortBuilder("rate").order(SortOrder.ASC))
.withSort(new FieldSortBuilder("message").order(SortOrder.ASC)).build();
// when
Page<SampleEntity> sampleEntities = elasticsearchTemplate.queryForPage(searchQuery, SampleEntity.class);
// then
@ -517,12 +503,12 @@ public class ElasticsearchTemplateTests {
SearchQuery searchQuery = new NativeSearchQueryBuilder()
.withQuery(matchAllQuery())
.withScriptField(new ScriptField("scriptedRate",
new Script("doc['rate'].value * factor", ScriptService.ScriptType.INLINE, null, params)))
new Script(ScriptType.INLINE, "expression", "doc['rate'] * factor", params)))
.build();
Page<SampleEntity> sampleEntities = elasticsearchTemplate.queryForPage(searchQuery, SampleEntity.class);
// then
assertThat(sampleEntities.getTotalElements(), equalTo(1L));
assertThat(sampleEntities.getContent().get(0).getScriptedRate(), equalTo(4L));
assertThat(sampleEntities.getContent().get(0).getScriptedRate(), equalTo(4.0));
}
@Test
@ -546,7 +532,6 @@ public class ElasticsearchTemplateTests {
}
@Test
@Ignore("By default, the search request will fail if there is no mapping associated with a field. The ignore_unmapped option allows to ignore fields that have no mapping and not sort by them")
public void shouldReturnSortedPageableResultsGivenStringQuery() {
// given
String documentId = randomNumeric(5);
@ -563,7 +548,7 @@ public class ElasticsearchTemplateTests {
elasticsearchTemplate.refresh(SampleEntity.class);
StringQuery stringQuery = new StringQuery(matchAllQuery().toString(), new PageRequest(0, 10), new Sort(
new Sort.Order(Sort.Direction.ASC, "messsage")));
new Sort.Order(Sort.Direction.ASC, "message")));
// when
Page<SampleEntity> sampleEntities = elasticsearchTemplate.queryForPage(stringQuery, SampleEntity.class);
// then
@ -594,6 +579,7 @@ public class ElasticsearchTemplateTests {
public void shouldCreateIndexGivenEntityClass() {
// when
boolean created = elasticsearchTemplate.createIndex(SampleEntity.class);
elasticsearchTemplate.putMapping(SampleEntity.class);
final Map setting = elasticsearchTemplate.getSetting(SampleEntity.class);
// then
assertThat(created, is(true));
@ -663,7 +649,7 @@ public class ElasticsearchTemplateTests {
public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable) {
List<String> values = new ArrayList<>();
for (SearchHit searchHit : response.getHits()) {
values.add((String) searchHit.field("message").value());
values.add((String) searchHit.getSource().get("message"));
}
return new AggregatedPageImpl<>((List<T>) values);
}
@ -753,18 +739,13 @@ public class ElasticsearchTemplateTests {
criteriaQuery.addTypes(TYPE_NAME);
criteriaQuery.setPageable(new PageRequest(0, 10));
String scrollId = elasticsearchTemplate.scan(criteriaQuery, 1000, false);
ScrolledPage<SampleEntity> scroll = (ScrolledPage<SampleEntity>) elasticsearchTemplate.startScroll( 1000, criteriaQuery, SampleEntity.class);
List<SampleEntity> sampleEntities = new ArrayList<>();
boolean hasRecords = true;
while (hasRecords) {
Page<SampleEntity> page = elasticsearchTemplate.scroll(scrollId, 5000L, SampleEntity.class);
if (page.hasContent()) {
sampleEntities.addAll(page.getContent());
} else {
hasRecords = false;
}
while (scroll.hasContent()) {
sampleEntities.addAll(scroll.getContent());
scroll = (ScrolledPage<SampleEntity>) elasticsearchTemplate.continueScroll(scroll.getScrollId() , 1000, SampleEntity.class);
}
elasticsearchTemplate.clearScroll(scrollId);
elasticsearchTemplate.clearScroll(scroll.getScrollId());
assertThat(sampleEntities.size(), is(equalTo(30)));
}
@ -780,26 +761,44 @@ public class ElasticsearchTemplateTests {
SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(matchAllQuery()).withIndices(INDEX_NAME)
.withTypes(TYPE_NAME).withPageable(new PageRequest(0, 10)).build();
String scrollId = elasticsearchTemplate.scan(searchQuery, 1000, false);
ScrolledPage<SampleEntity> scroll = (ScrolledPage<SampleEntity>) elasticsearchTemplate.startScroll(1000, searchQuery, SampleEntity.class);
List<SampleEntity> sampleEntities = new ArrayList<>();
boolean hasRecords = true;
while (hasRecords) {
Page<SampleEntity> page = elasticsearchTemplate.scroll(scrollId, 5000L, SampleEntity.class);
if (page.hasContent()) {
sampleEntities.addAll(page.getContent());
} else {
hasRecords = false;
}
while (scroll.hasContent()) {
sampleEntities.addAll(scroll.getContent());
scroll = (ScrolledPage<SampleEntity>) elasticsearchTemplate.continueScroll(scroll.getScrollId() , 1000, SampleEntity.class);
}
elasticsearchTemplate.clearScroll(scrollId);
elasticsearchTemplate.clearScroll(scroll.getScrollId());
assertThat(sampleEntities.size(), is(equalTo(30)));
}
final SearchResultMapper searchResultMapper = new SearchResultMapper() {
@Override
public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable) {
List<SampleEntity> result = new ArrayList<>();
for (SearchHit searchHit : response.getHits()) {
String message = (String) searchHit.getSource().get("message");
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(searchHit.getId());
sampleEntity.setMessage(message);
result.add(sampleEntity);
}
// an empty page still carries the scroll id so the caller can terminate or clear the scroll
return new AggregatedPageImpl<T>((List<T>) result, response.getScrollId());
}
};
/*
DATAES-167
*/
@Test
public void shouldReturnResultsWithScanAndScrollForSpecifiedFieldsForCriteriaCriteria() {
public void shouldReturnResultsWithScanAndScrollForSpecifiedFieldsForCriteriaQuery() {
//given
List<IndexQuery> entities = createSampleEntitiesWithMessage("Test message", 30);
// when
@ -813,35 +812,15 @@ public class ElasticsearchTemplateTests {
criteriaQuery.addFields("message");
criteriaQuery.setPageable(new PageRequest(0, 10));
String scrollId = elasticsearchTemplate.scan(criteriaQuery, 5000, false);
Page<SampleEntity> scroll = elasticsearchTemplate.startScroll(1000, criteriaQuery, SampleEntity.class, searchResultMapper);
String scrollId = ((ScrolledPage<?>)scroll).getScrollId();
List<SampleEntity> sampleEntities = new ArrayList<>();
boolean hasRecords = true;
while (hasRecords) {
Page<SampleEntity> page = elasticsearchTemplate.scroll(scrollId, 5000L, new SearchResultMapper() {
@Override
public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable) {
List<SampleEntity> result = new ArrayList<>();
for (SearchHit searchHit : response.getHits()) {
String message = searchHit.getFields().get("message").getValue();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(searchHit.getId());
sampleEntity.setMessage(message);
result.add(sampleEntity);
}
if (result.size() > 0) {
return new AggregatedPageImpl<>((List<T>) result);
}
return null;
}
});
if (page != null) {
sampleEntities.addAll(page.getContent());
} else {
hasRecords = false;
while (scroll.hasContent()) {
sampleEntities.addAll(scroll.getContent());
scrollId = ((ScrolledPage<?>)scroll).getScrollId();
scroll = elasticsearchTemplate.continueScroll(scrollId , 1000, SampleEntity.class, searchResultMapper);
}
}
elasticsearchTemplate.clearScroll(scrollId);
elasticsearchTemplate.clearScroll(scrollId);
assertThat(sampleEntities.size(), is(equalTo(30)));
}
@ -865,33 +844,13 @@ public class ElasticsearchTemplateTests {
.withPageable(new PageRequest(0, 10))
.build();
String scrollId = elasticsearchTemplate.scan(searchQuery, 10000, false);
Page<SampleEntity> scroll = elasticsearchTemplate.startScroll(1000, searchQuery, SampleEntity.class, searchResultMapper);
String scrollId = ((ScrolledPage) scroll).getScrollId();
List<SampleEntity> sampleEntities = new ArrayList<>();
boolean hasRecords = true;
while (hasRecords) {
Page<SampleEntity> page = elasticsearchTemplate.scroll(scrollId, 10000L, new SearchResultMapper() {
@Override
public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable) {
List<SampleEntity> result = new ArrayList<>();
for (SearchHit searchHit : response.getHits()) {
String message = searchHit.getFields().get("message").getValue();
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId(searchHit.getId());
sampleEntity.setMessage(message);
result.add(sampleEntity);
}
if (result.size() > 0) {
return new AggregatedPageImpl<>((List<T>) result);
}
return null;
}
});
if (page != null) {
sampleEntities.addAll(page.getContent());
} else {
hasRecords = false;
}
while (scroll.hasContent()) {
sampleEntities.addAll(scroll.getContent());
scrollId = ((ScrolledPage) scroll).getScrollId();
scroll = elasticsearchTemplate.continueScroll(scrollId, 1000, SampleEntity.class, searchResultMapper);
}
elasticsearchTemplate.clearScroll(scrollId);
assertThat(sampleEntities.size(), is(equalTo(30)));
@ -914,34 +873,13 @@ public class ElasticsearchTemplateTests {
criteriaQuery.addTypes(TYPE_NAME);
criteriaQuery.setPageable(new PageRequest(0, 10));
String scrollId = elasticsearchTemplate.scan(criteriaQuery, 5000, false);
Page<SampleEntity> scroll = elasticsearchTemplate.startScroll(1000, criteriaQuery, SampleEntity.class, searchResultMapper);
String scrollId = ((ScrolledPage) scroll).getScrollId();
List<SampleEntity> sampleEntities = new ArrayList<>();
boolean hasRecords = true;
while (hasRecords) {
Page<SampleEntity> page = elasticsearchTemplate.scroll(scrollId, 5000L, new SearchResultMapper() {
@Override
public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable) {
List<SampleEntity> chunk = new ArrayList<>();
for (SearchHit searchHit : response.getHits()) {
if (response.getHits().getHits().length <= 0) {
return null;
}
SampleEntity user = new SampleEntity();
user.setId(searchHit.getId());
user.setMessage((String) searchHit.getSource().get("message"));
chunk.add(user);
}
if (chunk.size() > 0) {
return new AggregatedPageImpl<>((List<T>) chunk);
}
return null;
}
});
if (page != null) {
sampleEntities.addAll(page.getContent());
} else {
hasRecords = false;
}
while (scroll.hasContent()) {
sampleEntities.addAll(scroll.getContent());
scrollId = ((ScrolledPage) scroll).getScrollId();
scroll = elasticsearchTemplate.continueScroll(scrollId, 1000, SampleEntity.class, searchResultMapper);
}
elasticsearchTemplate.clearScroll(scrollId);
assertThat(sampleEntities.size(), is(equalTo(30)));
@ -959,34 +897,13 @@ public class ElasticsearchTemplateTests {
SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(matchAllQuery()).withIndices(INDEX_NAME)
.withTypes(TYPE_NAME).withPageable(new PageRequest(0, 10)).build();
String scrollId = elasticsearchTemplate.scan(searchQuery, 1000, false);
Page<SampleEntity> scroll = elasticsearchTemplate.startScroll(1000, searchQuery, SampleEntity.class,searchResultMapper);
String scrollId = ((ScrolledPage) scroll).getScrollId();
List<SampleEntity> sampleEntities = new ArrayList<>();
boolean hasRecords = true;
while (hasRecords) {
Page<SampleEntity> page = elasticsearchTemplate.scroll(scrollId, 5000L, new SearchResultMapper() {
@Override
public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable) {
List<SampleEntity> chunk = new ArrayList<>();
for (SearchHit searchHit : response.getHits()) {
if (response.getHits().getHits().length <= 0) {
return null;
}
SampleEntity user = new SampleEntity();
user.setId(searchHit.getId());
user.setMessage((String) searchHit.getSource().get("message"));
chunk.add(user);
}
if (chunk.size() > 0) {
return new AggregatedPageImpl<>((List<T>) chunk);
}
return null;
}
});
if (page != null) {
sampleEntities.addAll(page.getContent());
} else {
hasRecords = false;
}
while (scroll.hasContent()) {
sampleEntities.addAll(scroll.getContent());
scrollId = ((ScrolledPage) scroll).getScrollId();
scroll = elasticsearchTemplate.continueScroll(scrollId, 1000, SampleEntity.class, searchResultMapper);
}
elasticsearchTemplate.clearScroll(scrollId);
assertThat(sampleEntities.size(), is(equalTo(30)));
@ -1007,16 +924,13 @@ public class ElasticsearchTemplateTests {
CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria());
criteriaQuery.setPageable(new PageRequest(0, 10));
String scrollId = elasticsearchTemplate.scan(criteriaQuery, 1000, false, SampleEntity.class);
Page<SampleEntity> scroll = elasticsearchTemplate.startScroll(1000, criteriaQuery, SampleEntity.class);
String scrollId = ((ScrolledPage) scroll).getScrollId();
List<SampleEntity> sampleEntities = new ArrayList<>();
boolean hasRecords = true;
while (hasRecords) {
Page<SampleEntity> page = elasticsearchTemplate.scroll(scrollId, 5000L, SampleEntity.class);
if (page.hasContent()) {
sampleEntities.addAll(page.getContent());
} else {
hasRecords = false;
}
while (scroll.hasContent()) {
sampleEntities.addAll(scroll.getContent());
scrollId = ((ScrolledPage) scroll).getScrollId();
scroll = elasticsearchTemplate.continueScroll(scrollId, 1000, SampleEntity.class);
}
elasticsearchTemplate.clearScroll(scrollId);
assertThat(sampleEntities.size(), is(equalTo(30)));
@ -1037,16 +951,13 @@ public class ElasticsearchTemplateTests {
SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(matchAllQuery())
.withPageable(new PageRequest(0, 10)).build();
String scrollId = elasticsearchTemplate.scan(searchQuery, 1000, false, SampleEntity.class);
Page<SampleEntity> scroll = elasticsearchTemplate.startScroll(1000, searchQuery, SampleEntity.class);
String scrollId = ((ScrolledPage) scroll).getScrollId();
List<SampleEntity> sampleEntities = new ArrayList<>();
boolean hasRecords = true;
while (hasRecords) {
Page<SampleEntity> page = elasticsearchTemplate.scroll(scrollId, 5000L, SampleEntity.class);
if (page.hasContent()) {
sampleEntities.addAll(page.getContent());
} else {
hasRecords = false;
}
while (scroll.hasContent()) {
sampleEntities.addAll(scroll.getContent());
scrollId = ((ScrolledPage) scroll).getScrollId();
scroll = elasticsearchTemplate.continueScroll(scrollId, 1000, SampleEntity.class);
}
elasticsearchTemplate.clearScroll(scrollId);
assertThat(sampleEntities.size(), is(equalTo(30)));
@ -1175,6 +1086,7 @@ public class ElasticsearchTemplateTests {
public void shouldPutMappingForGivenEntity() throws Exception {
// given
Class entity = SampleMappingEntity.class;
elasticsearchTemplate.deleteIndex(entity);
elasticsearchTemplate.createIndex(entity);
// when
assertThat(elasticsearchTemplate.putMapping(entity), is(true));
@ -1264,9 +1176,10 @@ public class ElasticsearchTemplateTests {
elasticsearchTemplate.index(indexQuery);
elasticsearchTemplate.refresh(SampleEntity.class);
final List<HighlightBuilder.Field> message = new HighlightBuilder().field("message").fields();
SearchQuery searchQuery = new NativeSearchQueryBuilder()
.withQuery(termQuery("message", "test"))
.withHighlightFields(new HighlightBuilder.Field("message"))
.withHighlightFields(message.toArray(new HighlightBuilder.Field[message.size()]))
.build();
Page<SampleEntity> sampleEntities = elasticsearchTemplate.queryForPage(searchQuery, SampleEntity.class, new SearchResultMapper() {
@ -1399,10 +1312,13 @@ public class ElasticsearchTemplateTests {
// when
SearchQuery searchQuery = new NativeSearchQueryBuilder()
.withQuery(boolQuery().must(wildcardQuery("message", "*a*")).should(wildcardQuery("message", "*b*")))
.withQuery(boolQuery()
.must(wildcardQuery("message", "*a*"))
.should(wildcardQuery("message", "*b*"))
)
.withIndices(INDEX_NAME)
.withTypes(TYPE_NAME)
.withMinScore(0.5F)
.withMinScore(2.0F)
.build();
Page<SampleEntity> page = elasticsearchTemplate.queryForPage(searchQuery, SampleEntity.class);
@ -1571,7 +1487,7 @@ public class ElasticsearchTemplateTests {
elasticsearchTemplate.index(indexQuery);
elasticsearchTemplate.refresh(SampleEntity.class);
CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria());
criteriaQuery.addIndices("test-index");
criteriaQuery.addIndices(INDEX_NAME);
// when
long count = elasticsearchTemplate.count(criteriaQuery);
// then
@ -1593,7 +1509,7 @@ public class ElasticsearchTemplateTests {
elasticsearchTemplate.refresh(SampleEntity.class);
SearchQuery searchQuery = new NativeSearchQueryBuilder()
.withQuery(matchAllQuery())
.withIndices("test-index")
.withIndices(INDEX_NAME)
.build();
// when
long count = elasticsearchTemplate.count(searchQuery);
@ -1615,7 +1531,7 @@ public class ElasticsearchTemplateTests {
elasticsearchTemplate.index(indexQuery);
elasticsearchTemplate.refresh(SampleEntity.class);
CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria());
criteriaQuery.addIndices("test-index");
criteriaQuery.addIndices(INDEX_NAME);
criteriaQuery.addTypes("test-type");
// when
long count = elasticsearchTemplate.count(criteriaQuery);
@ -1638,7 +1554,7 @@ public class ElasticsearchTemplateTests {
elasticsearchTemplate.refresh(SampleEntity.class);
SearchQuery searchQuery = new NativeSearchQueryBuilder()
.withQuery(matchAllQuery())
.withIndices("test-index")
.withIndices(INDEX_NAME)
.withTypes("test-type")
.build();
// when
@ -1952,11 +1868,12 @@ public class ElasticsearchTemplateTests {
elasticsearchTemplate.deleteIndex(SampleEntity.class);
elasticsearchTemplate.createIndex(SampleEntity.class, settings);
elasticsearchTemplate.putMapping(SampleEntity.class);
elasticsearchTemplate.refresh(SampleEntity.class);
// then
Map map = elasticsearchTemplate.getSetting(SampleEntity.class);
assertThat(elasticsearchTemplate.indexExists("test-index"), is(true));
assertThat(elasticsearchTemplate.indexExists(INDEX_NAME), is(true));
assertThat(map.containsKey("index.number_of_replicas"), is(true));
assertThat(map.containsKey("index.number_of_shards"), is(true));
assertThat((String) map.get("index.number_of_replicas"), is("0"));

View File

@ -33,14 +33,8 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.elasticsearch.builder.SampleInheritedEntityBuilder;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.elasticsearch.core.query.SearchQuery;
import org.springframework.data.elasticsearch.entities.*;
import org.springframework.data.elasticsearch.entities.GeoEntity;
import org.springframework.data.elasticsearch.entities.Group;
import org.springframework.data.elasticsearch.entities.MinimalEntity;
import org.springframework.data.elasticsearch.entities.SampleInheritedEntity;
import org.springframework.data.elasticsearch.entities.SampleTransientEntity;
import org.springframework.data.elasticsearch.entities.SimpleRecursiveEntity;
import org.springframework.data.elasticsearch.entities.StockPrice;
import org.springframework.data.elasticsearch.entities.User;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
@ -59,14 +53,16 @@ public class MappingBuilderTests {
@Test
public void shouldNotFailOnCircularReference() {
elasticsearchTemplate.deleteIndex(SimpleRecursiveEntity.class);
elasticsearchTemplate.createIndex(SimpleRecursiveEntity.class);
elasticsearchTemplate.putMapping(SimpleRecursiveEntity.class);
elasticsearchTemplate.refresh(SimpleRecursiveEntity.class);
}
@Test
public void testInfiniteLoopAvoidance() throws IOException {
final String expected = "{\"mapping\":{\"properties\":{\"message\":{\"store\":true,\"" +
"type\":\"string\",\"index\":\"not_analyzed\"," +
"type\":\"text\",\"index\":false," +
"\"analyzer\":\"standard\"}}}}";
XContentBuilder xContentBuilder = MappingBuilder.buildMapping(SampleTransientEntity.class, "mapping", "id", null);
@ -121,9 +117,9 @@ public class MappingBuilderTests {
@Test
public void shouldBuildMappingWithSuperclass() throws IOException {
final String expected = "{\"mapping\":{\"properties\":{\"message\":{\"store\":true,\"" +
"type\":\"string\",\"index\":\"not_analyzed\",\"analyzer\":\"standard\"}" +
"type\":\"text\",\"index\":false,\"analyzer\":\"standard\"}" +
",\"createdDate\":{\"store\":false," +
"\"type\":\"date\",\"index\":\"not_analyzed\"}}}}";
"\"type\":\"date\",\"index\":false}}}}";
XContentBuilder xContentBuilder = MappingBuilder.buildMapping(SampleInheritedEntity.class, "mapping", "id", null);
assertThat(xContentBuilder.string(), is(expected));
@ -186,4 +182,15 @@ public class MappingBuilderTests {
//then
}
@Test
public void shouldMapBooks() {
//given
elasticsearchTemplate.createIndex(Book.class);
elasticsearchTemplate.putMapping(Book.class);
//when
//then
}
}

View File

@ -30,7 +30,7 @@ import org.springframework.data.elasticsearch.entities.SampleDateMappingEntity;
public class SimpleElasticsearchDateMappingTests {
private static final String EXPECTED_MAPPING = "{\"mapping\":{\"properties\":{\"message\":{\"store\":true," +
"\"type\":\"string\",\"index\":\"not_analyzed\",\"analyzer\":\"standard\"},\"customFormatDate\":{\"store\":false,\"type\":\"date\",\"format\":\"dd.MM.yyyy hh:mm\"}," +
"\"type\":\"text\",\"index\":false,\"analyzer\":\"standard\"},\"customFormatDate\":{\"store\":false,\"type\":\"date\",\"format\":\"dd.MM.yyyy hh:mm\"}," +
"\"defaultFormatDate\":{\"store\":false,\"type\":\"date\"},\"basicFormatDate\":{\"store\":false,\"" +
"type\":\"date\",\"format\":\"basic_date\"}}}}";

View File

@ -22,6 +22,7 @@ import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.search.aggregations.Aggregations;
import org.junit.Before;
import org.junit.Test;
@ -36,6 +37,11 @@ import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilde
import org.springframework.data.elasticsearch.core.query.SearchQuery;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertThat;
/**
* @author Rizwan Idrees
@ -81,8 +87,8 @@ public class ElasticsearchTemplateAggregationTests {
// given
SearchQuery searchQuery = new NativeSearchQueryBuilder()
.withQuery(matchAllQuery())
.withSearchType(COUNT)
.withIndices("articles").withTypes("article")
.withSearchType(SearchType.DEFAULT)
.withIndices("test-index-articles").withTypes("article")
.addAggregation(terms("subjects").field("subject"))
.build();
// when

View File

@ -7,14 +7,14 @@ import org.springframework.data.elasticsearch.annotations.Document;
/**
* @author Mewes Kochheim
*/
@Document(indexName = "test-completion-index", type = "annotated-completion-type", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-annotated-completion", type = "annotated-completion-type", shards = 1, replicas = 0, refreshInterval = "-1")
public class AnnotatedCompletionEntity {
@Id
private String id;
private String name;
@CompletionField(payloads = true, maxInputLength = 100)
@CompletionField(maxInputLength = 100)
private Completion suggest;
private AnnotatedCompletionEntity() {

View File

@ -36,23 +36,12 @@ public class AnnotatedCompletionEntityBuilder {
}
public AnnotatedCompletionEntityBuilder suggest(String[] input) {
return suggest(input, null, null, null);
return suggest(input, null);
}
public AnnotatedCompletionEntityBuilder suggest(String[] input, String output) {
return suggest(input, output, null, null);
}
public AnnotatedCompletionEntityBuilder suggest(String[] input, String output, Object payload) {
return suggest(input, output, payload, null);
}
public AnnotatedCompletionEntityBuilder suggest(String[] input, String output, Object payload, Integer weight) {
public AnnotatedCompletionEntityBuilder suggest(String[] input, Integer weight) {
Completion suggest = new Completion(input);
suggest.setOutput(output);
suggest.setPayload(payload);
suggest.setWeight(weight);
result.setSuggest(suggest);
return this;
}
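The completion suggester in Elasticsearch 5.x dropped output and payloads, which is why the builder shrinks to input plus an optional weight; building a weighted entry now looks like:

Completion suggest = new Completion(new String[]{"Artur", "Konczak"});
suggest.setWeight(10);   // weight is the only ranking knob left; it comes back as the option score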

View File

@ -9,14 +9,14 @@ import org.springframework.data.elasticsearch.annotations.Document;
* @author Mohsin Husen
* @author Mewes Kochheim
*/
@Document(indexName = "test-completion-index", type = "completion-annotation-type", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-completion-annotated", type = "completion-annotation-type", shards = 1, replicas = 0, refreshInterval = "-1")
public class CompletionAnnotatedEntity {
@Id
private String id;
private String name;
@CompletionField(payloads = true)
@CompletionField
private Completion suggest;
private CompletionAnnotatedEntity() {

View File

@ -7,7 +7,7 @@ import org.springframework.data.elasticsearch.annotations.Document;
/**
* @author Mewes Kochheim
*/
@Document(indexName = "test-completion-index", type = "completion-type", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-completion", type = "completion-type", shards = 1, replicas = 0, refreshInterval = "-1")
public class CompletionEntity {
@Id

View File

@ -36,25 +36,11 @@ public class CompletionEntityAnnotatedBuilder {
}
public CompletionEntityAnnotatedBuilder suggest(String[] input) {
return suggest(input, null, null, null);
return suggest(input, null);
}
public CompletionEntityAnnotatedBuilder suggest(String[] input, String output) {
return suggest(input, output, null, null);
}
public CompletionEntityAnnotatedBuilder suggest(String[] input, String output, Object payload) {
return suggest(input, output, payload, null);
}
public CompletionEntityAnnotatedBuilder suggest(String[] input, String output, Object payload, Integer weight) {
public CompletionEntityAnnotatedBuilder suggest(String[] input, Integer weight) {
Completion suggest = new Completion(input);
if (output != null) {
suggest.setOutput(output);
}
if (payload != null) {
suggest.setPayload(payload);
}
if (weight != null) {
suggest.setWeight(weight);
}

View File

@ -34,21 +34,11 @@ public class CompletionEntityBuilder {
}
public CompletionEntityBuilder suggest(String[] input) {
return suggest(input, null, null, null);
return suggest(input, null);
}
public CompletionEntityBuilder suggest(String[] input, String output) {
return suggest(input, output, null, null);
}
public CompletionEntityBuilder suggest(String[] input, String output, Object payload) {
return suggest(input, output, payload, null);
}
public CompletionEntityBuilder suggest(String[] input, String output, Object payload, Integer weight) {
public CompletionEntityBuilder suggest(String[] input, Integer weight) {
Completion suggest = new Completion(input);
suggest.setOutput(output);
suggest.setPayload(payload);
suggest.setWeight(weight);
result.setSuggest(suggest);

View File

@ -22,9 +22,13 @@ import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.List;
import org.elasticsearch.action.suggest.SuggestResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.SuggestBuilders;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionFuzzyBuilder;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
@ -34,6 +38,13 @@ import org.springframework.data.elasticsearch.entities.NonDocumentEntity;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.util.ArrayList;
import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertEquals;
/**
* @author Rizwan Idrees
* @author Mohsin Husen
@ -57,8 +68,8 @@ public class ElasticsearchTemplateCompletionTests {
List<IndexQuery> indexQueries = new ArrayList<>();
indexQueries.add(new CompletionEntityBuilder("1").name("Rizwan Idrees").suggest(new String[]{"Rizwan Idrees"}).buildIndex());
indexQueries.add(new CompletionEntityBuilder("2").name("Franck Marchand").suggest(new String[]{"Franck", "Marchand"}).buildIndex());
indexQueries.add(new CompletionEntityBuilder("3").name("Mohsin Husen").suggest(new String[]{"Mohsin", "Husen"}, "Mohsin Husen").buildIndex());
indexQueries.add(new CompletionEntityBuilder("4").name("Artur Konczak").suggest(new String[]{"Artur", "Konczak"}, "Artur Konczak").buildIndex());
indexQueries.add(new CompletionEntityBuilder("3").name("Mohsin Husen").suggest(new String[]{"Mohsin", "Husen"}).buildIndex());
indexQueries.add(new CompletionEntityBuilder("4").name("Artur Konczak").suggest(new String[]{"Artur", "Konczak"}).buildIndex());
elasticsearchTemplate.bulkIndex(indexQueries);
elasticsearchTemplate.refresh(CompletionEntity.class);
@ -76,29 +87,9 @@ public class ElasticsearchTemplateCompletionTests {
List<IndexQuery> indexQueries = new ArrayList<>();
indexQueries.add(new AnnotatedCompletionEntityBuilder("1").name("Franck Marchand").suggest(new String[]{"Franck", "Marchand"}).buildIndex());
indexQueries.add(new AnnotatedCompletionEntityBuilder("2").name("Mohsin Husen").suggest(new String[]{"Mohsin", "Husen"}, "Mohsin Husen").buildIndex());
indexQueries.add(new AnnotatedCompletionEntityBuilder("3").name("Rizwan Idrees").suggest(new String[]{"Rizwan", "Idrees"}, "Rizwan Idrees").buildIndex());
indexQueries.add(new AnnotatedCompletionEntityBuilder("4").name("Artur Konczak").suggest(new String[]{"Artur", "Konczak"}, "Artur Konczak").buildIndex());
elasticsearchTemplate.bulkIndex(indexQueries);
elasticsearchTemplate.refresh(AnnotatedCompletionEntity.class);
}
private void loadAnnotatedCompletionObjectEntitiesWithPayloads() {
elasticsearchTemplate.deleteIndex(AnnotatedCompletionEntity.class);
elasticsearchTemplate.createIndex(AnnotatedCompletionEntity.class);
elasticsearchTemplate.refresh(AnnotatedCompletionEntity.class);
elasticsearchTemplate.putMapping(AnnotatedCompletionEntity.class);
NonDocumentEntity nonDocumentEntity = new NonDocumentEntity();
nonDocumentEntity.setSomeField1("Payload");
nonDocumentEntity.setSomeField2("test");
List<IndexQuery> indexQueries = new ArrayList<>();
indexQueries.add(new AnnotatedCompletionEntityBuilder("1").name("Mewes Kochheim1").suggest(new String[]{"Mewes Kochheim1"}, null, Double.MAX_VALUE).buildIndex());
indexQueries.add(new AnnotatedCompletionEntityBuilder("2").name("Mewes Kochheim2").suggest(new String[]{"Mewes Kochheim2"}, null, Long.MAX_VALUE).buildIndex());
indexQueries.add(new AnnotatedCompletionEntityBuilder("3").name("Mewes Kochheim3").suggest(new String[]{"Mewes Kochheim3"}, null, "Payload test").buildIndex());
indexQueries.add(new AnnotatedCompletionEntityBuilder("4").name("Mewes Kochheim4").suggest(new String[]{"Mewes Kochheim4"}, null, nonDocumentEntity).buildIndex());
indexQueries.add(new AnnotatedCompletionEntityBuilder("2").name("Mohsin Husen").suggest(new String[]{"Mohsin", "Husen"}).buildIndex());
indexQueries.add(new AnnotatedCompletionEntityBuilder("3").name("Rizwan Idrees").suggest(new String[]{"Rizwan", "Idrees"}).buildIndex());
indexQueries.add(new AnnotatedCompletionEntityBuilder("4").name("Artur Konczak").suggest(new String[]{"Artur", "Konczak"}).buildIndex());
elasticsearchTemplate.bulkIndex(indexQueries);
elasticsearchTemplate.refresh(AnnotatedCompletionEntity.class);
@ -107,14 +98,14 @@ public class ElasticsearchTemplateCompletionTests {
private void loadAnnotatedCompletionObjectEntitiesWithWeights() {
elasticsearchTemplate.deleteIndex(AnnotatedCompletionEntity.class);
elasticsearchTemplate.createIndex(AnnotatedCompletionEntity.class);
elasticsearchTemplate.refresh(AnnotatedCompletionEntity.class);
elasticsearchTemplate.putMapping(AnnotatedCompletionEntity.class);
elasticsearchTemplate.refresh(AnnotatedCompletionEntity.class);
List<IndexQuery> indexQueries = new ArrayList<>();
indexQueries.add(new AnnotatedCompletionEntityBuilder("1").name("Mewes Kochheim1").suggest(new String[]{"Mewes Kochheim1"}).buildIndex());
indexQueries.add(new AnnotatedCompletionEntityBuilder("2").name("Mewes Kochheim2").suggest(new String[]{"Mewes Kochheim2"}, null, null, 0).buildIndex());
indexQueries.add(new AnnotatedCompletionEntityBuilder("3").name("Mewes Kochheim3").suggest(new String[]{"Mewes Kochheim3"}, null, null, 1).buildIndex());
indexQueries.add(new AnnotatedCompletionEntityBuilder("4").name("Mewes Kochheim4").suggest(new String[]{"Mewes Kochheim4"}, null, null, Integer.MAX_VALUE).buildIndex());
indexQueries.add(new AnnotatedCompletionEntityBuilder("1").name("Mewes Kochheim1").suggest(new String[]{"Mewes Kochheim1"},4).buildIndex());
indexQueries.add(new AnnotatedCompletionEntityBuilder("2").name("Mewes Kochheim2").suggest(new String[]{"Mewes Kochheim2"}, 1).buildIndex());
indexQueries.add(new AnnotatedCompletionEntityBuilder("3").name("Mewes Kochheim3").suggest(new String[]{"Mewes Kochheim3"}, 2).buildIndex());
indexQueries.add(new AnnotatedCompletionEntityBuilder("4").name("Mewes Kochheim4").suggest(new String[]{"Mewes Kochheim4"}, Integer.MAX_VALUE).buildIndex());
elasticsearchTemplate.bulkIndex(indexQueries);
elasticsearchTemplate.refresh(AnnotatedCompletionEntity.class);
@ -134,81 +125,45 @@ public class ElasticsearchTemplateCompletionTests {
public void shouldFindSuggestionsForGivenCriteriaQueryUsingCompletionEntity() {
//given
loadCompletionObjectEntities();
CompletionSuggestionFuzzyBuilder completionSuggestionFuzzyBuilder = new CompletionSuggestionFuzzyBuilder("test-suggest")
.text("m")
.field("suggest");
SuggestionBuilder completionSuggestionFuzzyBuilder = SuggestBuilders.completionSuggestion("suggest").prefix("m", Fuzziness.AUTO);
//when
SuggestResponse suggestResponse = elasticsearchTemplate.suggest(completionSuggestionFuzzyBuilder, CompletionEntity.class);
final SearchResponse suggestResponse = elasticsearchTemplate.suggest(new SuggestBuilder().addSuggestion("test-suggest", completionSuggestionFuzzyBuilder), CompletionEntity.class);
CompletionSuggestion completionSuggestion = suggestResponse.getSuggest().getSuggestion("test-suggest");
List<CompletionSuggestion.Entry.Option> options = completionSuggestion.getEntries().get(0).getOptions();
//then
assertThat(options.size(), is(2));
assertThat(options.get(0).getText().string(), isOneOf("Marchand", "Mohsin Husen"));
assertThat(options.get(1).getText().string(), isOneOf("Marchand", "Mohsin Husen"));
assertThat(options.get(0).getText().string(), isOneOf("Marchand", "Mohsin"));
assertThat(options.get(1).getText().string(), isOneOf("Marchand", "Mohsin"));
}
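The removed CompletionSuggestionFuzzyBuilder has no direct 5.x counterpart: the suggestion is built via SuggestBuilders, wrapped in a SuggestBuilder, and the result now arrives as a SearchResponse rather than a SuggestResponse. Untangled from the interleaved old/new lines above, the round trip used in these tests reads:

CompletionSuggestionBuilder suggestion = SuggestBuilders.completionSuggestion("suggest")
        .prefix("m", Fuzziness.AUTO);
SearchResponse response = elasticsearchTemplate.suggest(
        new SuggestBuilder().addSuggestion("test-suggest", suggestion), CompletionEntity.class);
CompletionSuggestion completion = response.getSuggest().getSuggestion("test-suggest");
List<CompletionSuggestion.Entry.Option> options = completion.getEntries().get(0).getOptions();
for (CompletionSuggestion.Entry.Option option : options) {
    String text = option.getText().string();
    float weight = option.getScore();   // the indexed weight surfaces as the score; payloads are gone
}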
@Test
public void shouldFindSuggestionsForGivenCriteriaQueryUsingAnnotatedCompletionEntity() {
//given
loadAnnotatedCompletionObjectEntities();
CompletionSuggestionFuzzyBuilder completionSuggestionFuzzyBuilder = new CompletionSuggestionFuzzyBuilder("test-suggest")
.text("m")
.field("suggest");
SuggestionBuilder completionSuggestionFuzzyBuilder = SuggestBuilders.completionSuggestion("suggest").prefix("m", Fuzziness.AUTO);
//when
SuggestResponse suggestResponse = elasticsearchTemplate.suggest(completionSuggestionFuzzyBuilder, CompletionEntity.class);
final SearchResponse suggestResponse = elasticsearchTemplate.suggest(new SuggestBuilder().addSuggestion("test-suggest", completionSuggestionFuzzyBuilder), CompletionEntity.class);
CompletionSuggestion completionSuggestion = suggestResponse.getSuggest().getSuggestion("test-suggest");
List<CompletionSuggestion.Entry.Option> options = completionSuggestion.getEntries().get(0).getOptions();
//then
assertThat(options.size(), is(2));
assertThat(options.get(0).getText().string(), isOneOf("Marchand", "Mohsin Husen"));
assertThat(options.get(1).getText().string(), isOneOf("Marchand", "Mohsin Husen"));
}
@Test
public void shouldFindSuggestionsWithPayloadsForGivenCriteriaQueryUsingAnnotatedCompletionEntity() {
//given
loadAnnotatedCompletionObjectEntitiesWithPayloads();
CompletionSuggestionFuzzyBuilder completionSuggestionFuzzyBuilder = new CompletionSuggestionFuzzyBuilder("test-suggest")
.text("m")
.field("suggest");
//when
SuggestResponse suggestResponse = elasticsearchTemplate.suggest(completionSuggestionFuzzyBuilder, CompletionEntity.class);
CompletionSuggestion completionSuggestion = suggestResponse.getSuggest().getSuggestion("test-suggest");
List<CompletionSuggestion.Entry.Option> options = completionSuggestion.getEntries().get(0).getOptions();
//then
assertThat(options.size(), is(4));
for (CompletionSuggestion.Entry.Option option : options) {
if (option.getText().string().equals("Mewes Kochheim1")) {
assertEquals(Double.MAX_VALUE, option.getPayloadAsDouble(), 0);
} else if (option.getText().string().equals("Mewes Kochheim2")) {
assertEquals(Long.MAX_VALUE, option.getPayloadAsLong());
} else if (option.getText().string().equals("Mewes Kochheim3")) {
assertEquals("Payload test", option.getPayloadAsString());
} else if (option.getText().string().equals("Mewes Kochheim4")) {
assertEquals("Payload", option.getPayloadAsMap().get("someField1"));
assertEquals("test", option.getPayloadAsMap().get("someField2"));
} else {
fail("Unexpected option");
}
}
assertThat(options.get(0).getText().string(), isOneOf("Marchand", "Mohsin"));
assertThat(options.get(1).getText().string(), isOneOf("Marchand", "Mohsin"));
}
@Test
public void shouldFindSuggestionsWithWeightsForGivenCriteriaQueryUsingAnnotatedCompletionEntity() {
//given
loadAnnotatedCompletionObjectEntitiesWithWeights();
CompletionSuggestionFuzzyBuilder completionSuggestionFuzzyBuilder = new CompletionSuggestionFuzzyBuilder("test-suggest")
.text("m")
.field("suggest");
SuggestionBuilder completionSuggestionFuzzyBuilder = SuggestBuilders.completionSuggestion("suggest").prefix("m", Fuzziness.AUTO);
//when
SuggestResponse suggestResponse = elasticsearchTemplate.suggest(completionSuggestionFuzzyBuilder, CompletionEntity.class);
final SearchResponse suggestResponse = elasticsearchTemplate.suggest(new SuggestBuilder().addSuggestion("test-suggest", completionSuggestionFuzzyBuilder), AnnotatedCompletionEntity.class);
CompletionSuggestion completionSuggestion = suggestResponse.getSuggest().getSuggestion("test-suggest");
List<CompletionSuggestion.Entry.Option> options = completionSuggestion.getEntries().get(0).getOptions();
@ -218,9 +173,9 @@ public class ElasticsearchTemplateCompletionTests {
if (option.getText().string().equals("Mewes Kochheim1")) {
assertEquals(4, option.getScore(), 0);
} else if (option.getText().string().equals("Mewes Kochheim2")) {
assertEquals(0, option.getScore(), 0);
} else if (option.getText().string().equals("Mewes Kochheim3")) {
assertEquals(1, option.getScore(), 0);
} else if (option.getText().string().equals("Mewes Kochheim3")) {
assertEquals(2, option.getScore(), 0);
} else if (option.getText().string().equals("Mewes Kochheim4")) {
assertEquals(Integer.MAX_VALUE, option.getScore(), 0);
} else {

View File

@ -15,17 +15,14 @@
*/
package org.springframework.data.elasticsearch.core.facet;
import static org.springframework.data.elasticsearch.annotations.FieldIndex.*;
import static org.springframework.data.elasticsearch.annotations.FieldType.*;
import static org.springframework.data.elasticsearch.annotations.FieldType.Integer;
import static org.springframework.data.elasticsearch.annotations.FieldType.text;
import java.util.ArrayList;
import java.util.List;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.InnerField;
import org.springframework.data.elasticsearch.annotations.MultiField;
import org.springframework.data.elasticsearch.annotations.*;
/**
* Simple type to test facets
@ -33,19 +30,20 @@ import org.springframework.data.elasticsearch.annotations.MultiField;
* @author Artur Konczak
* @author Mohsin Husen
*/
@Document(indexName = "articles", type = "article", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-articles", type = "article", shards = 1, replicas = 0, refreshInterval = "-1")
public class ArticleEntity {
@Id
private String id;
private String title;
@Field(type = text, fielddata = true)
private String subject;
@MultiField(
mainField = @Field(type = String, index = analyzed),
mainField = @Field(type = text),
otherFields = {
@InnerField(suffix = "untouched", type = String, store = true, index = not_analyzed),
@InnerField(suffix = "sort", type = String, store = true, indexAnalyzer = "keyword")
@InnerField(suffix = "untouched", type = text, store = true, fielddata = true, indexAnalyzer = "keyword"),
@InnerField(suffix = "sort", type = text, store = true, indexAnalyzer = "keyword")
}
)
private List<String> authors = new ArrayList<>();

View File

@ -20,24 +20,17 @@ import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
import org.springframework.data.elasticsearch.core.FacetedPage;
import org.springframework.data.elasticsearch.core.facet.request.HistogramFacetRequestBuilder;
import org.springframework.data.elasticsearch.core.facet.request.NativeFacetRequest;
import org.springframework.data.elasticsearch.core.facet.request.RangeFacetRequestBuilder;
import org.springframework.data.elasticsearch.core.facet.request.StatisticalFacetRequestBuilder;
import org.springframework.data.elasticsearch.core.facet.request.TermFacetRequestBuilder;
import org.springframework.data.elasticsearch.core.facet.result.HistogramResult;
import org.springframework.data.elasticsearch.core.facet.result.IntervalUnit;
import org.springframework.data.elasticsearch.core.facet.result.Range;
import org.springframework.data.elasticsearch.core.facet.result.RangeResult;
import org.springframework.data.elasticsearch.core.facet.result.StatisticalResult;
import org.springframework.data.elasticsearch.core.facet.result.Term;
import org.springframework.data.elasticsearch.core.facet.result.TermResult;
import org.springframework.data.elasticsearch.core.facet.request.*;
import org.springframework.data.elasticsearch.core.facet.result.*;
import org.springframework.data.elasticsearch.core.query.DeleteQuery;
import org.springframework.data.elasticsearch.core.query.IndexQuery;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.elasticsearch.core.query.SearchQuery;
@ -260,15 +253,15 @@ public class ElasticsearchTemplateFacetTests {
assertThat(facet.getTerms().size(), is(equalTo(3)));
Term term = facet.getTerms().get(0);
assertThat(term.getTerm(), is(Integer.toString(YEAR_2000)));
assertThat(term.getTerm(), is(Long.toString(YEAR_2000)));
assertThat(term.getCount(), is(3l));
term = facet.getTerms().get(1);
assertThat(term.getTerm(), is(Integer.toString(YEAR_2001)));
assertThat(term.getTerm(), is(Long.toString(YEAR_2001)));
assertThat(term.getCount(), is(2l));
term = facet.getTerms().get(2);
assertThat(term.getTerm(), is(Integer.toString(YEAR_2002)));
assertThat(term.getTerm(), is(Long.toString(YEAR_2002)));
assertThat(term.getCount(), is(1l));
assertThat(facet.getTotal(), is(3l));
@ -292,15 +285,15 @@ public class ElasticsearchTemplateFacetTests {
assertThat(facet.getTerms().size(), is(equalTo(3)));
Term term = facet.getTerms().get(0);
assertThat(term.getTerm(), is(Integer.toString(YEAR_2000)));
assertThat(term.getTerm(), is(Long.toString(YEAR_2000)));
assertThat(term.getCount(), is(3l));
term = facet.getTerms().get(1);
assertThat(term.getTerm(), is(Integer.toString(YEAR_2001)));
assertThat(term.getTerm(), is(Long.toString(YEAR_2001)));
assertThat(term.getCount(), is(2l));
term = facet.getTerms().get(2);
assertThat(term.getTerm(), is(Integer.toString(YEAR_2002)));
assertThat(term.getTerm(), is(Long.toString(YEAR_2002)));
assertThat(term.getCount(), is(1l));
assertThat(facet.getTotal(), is(3l));
@ -325,15 +318,15 @@ public class ElasticsearchTemplateFacetTests {
assertThat(facet.getTerms().size(), is(equalTo(7)));
Term term = facet.getTerms().get(0);
assertThat(term.getTerm(), is(Integer.toString(YEAR_2000)));
assertThat(term.getTerm(), is(Long.toString(YEAR_2000)));
assertThat(term.getCount(), is(3l));
term = facet.getTerms().get(1);
assertThat(term.getTerm(), is(Integer.toString(YEAR_2001)));
assertThat(term.getTerm(), is(Long.toString(YEAR_2001)));
assertThat(term.getCount(), is(2l));
term = facet.getTerms().get(2);
assertThat(term.getTerm(), is(Integer.toString(YEAR_2002)));
assertThat(term.getTerm(), is(Long.toString(YEAR_2002)));
assertThat(term.getCount(), is(1l));
term = facet.getTerms().get(3);
@ -376,15 +369,15 @@ public class ElasticsearchTemplateFacetTests {
assertThat(numberFacet.getTerms().size(), is(equalTo(3)));
Term numberTerm = numberFacet.getTerms().get(0);
assertThat(numberTerm.getTerm(), is(Integer.toString(YEAR_2000)));
assertThat(numberTerm.getTerm(), is(Long.toString(YEAR_2000)));
assertThat(numberTerm.getCount(), is(3l));
numberTerm = numberFacet.getTerms().get(1);
assertThat(numberTerm.getTerm(), is(Integer.toString(YEAR_2001)));
assertThat(numberTerm.getTerm(), is(Long.toString(YEAR_2001)));
assertThat(numberTerm.getCount(), is(2l));
numberTerm = numberFacet.getTerms().get(2);
assertThat(numberTerm.getTerm(), is(Integer.toString(YEAR_2002)));
assertThat(numberTerm.getTerm(), is(Long.toString(YEAR_2002)));
assertThat(numberTerm.getCount(), is(1l));
TermResult stringFacet = (TermResult) result.getFacet(stringFacetName);
@ -426,15 +419,15 @@ public class ElasticsearchTemplateFacetTests {
assertThat(facet.getTerms().size(), is(equalTo(3)));
Term term = facet.getTerms().get(0);
assertThat(term.getTerm(), is(Integer.toString(YEAR_2000)));
assertThat(term.getTerm(), is(Long.toString(YEAR_2000)));
assertThat(term.getCount(), is(3l));
term = facet.getTerms().get(1);
assertThat(term.getTerm(), is(Integer.toString(YEAR_2001)));
assertThat(term.getTerm(), is(Long.toString(YEAR_2001)));
assertThat(term.getCount(), is(2l));
term = facet.getTerms().get(2);
assertThat(term.getTerm(), is(Integer.toString(YEAR_2002)));
assertThat(term.getTerm(), is(Long.toString(YEAR_2002)));
assertThat(term.getCount(), is(1l));
assertThat(facet.getTotal(), is(6l));
@ -469,6 +462,7 @@ public class ElasticsearchTemplateFacetTests {
@Test
public void shouldReturnAllTermsForGivenQuery() {
// given
String facetName = "all_authors";
SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(matchAllQuery())
.withFacet(new TermFacetRequestBuilder(facetName).applyQueryFilter().fields("authors.untouched").allTerms().build()).build();

View File

@ -55,9 +55,9 @@ public class ElasticsearchTemplateHistogramFacetTests {
public static final long SEQUECE_CODE_UPDATE = 2;
public static final long SEQUECE_CODE_DELETE = 3;
public static final SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd HH:mm");
public static final String DATE_18 = "2013-10-18 18:01";
public static final String DATE_17 = "2013-10-18 17:01";
public static final String DATE_16 = "2013-10-18 16:01";
@Autowired
@ -87,7 +87,7 @@ public class ElasticsearchTemplateHistogramFacetTests {
@Test
public void shouldReturnSimpleHistogramFacetForGivenQuery() {
// given
String facetName = "sequenceCodeFacet";
SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(matchAllQuery())
.withFacet(new HistogramFacetRequestBuilder(facetName).field("sequenceCode").interval(1).build()
).build();
@ -115,7 +115,7 @@ public class ElasticsearchTemplateHistogramFacetTests {
@Test
public void shouldReturnDateHistogramFacetForGivenQuery() throws ParseException {
// given
String facetName = "sequenceCodeFacet";
SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(matchAllQuery())
.withFacet(new HistogramFacetRequestBuilder(facetName).field("date").interval(1).timeUnit(TimeUnit.HOURS).build()
).build();

View File

@ -31,7 +31,7 @@ import org.springframework.data.elasticsearch.annotations.Field;
* @author Mohsin Husen
*/
@Document(indexName = "test-log-index", type = "test-log-type", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-log", type = "test-log-type", shards = 1, replicas = 0, refreshInterval = "-1")
public class LogEntity {
private static final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");

View File

@ -22,7 +22,7 @@ import org.springframework.data.elasticsearch.annotations.Document;
* @author Franck Marchand
* @author Mohsin Husen
*/
@Document(indexName = "test-geo-index", type = "geo-class-point-type", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-author-marker", type = "geo-class-point-type", shards = 1, replicas = 0, refreshInterval = "-1")
public class AuthorMarkerEntity {
@Id

View File

@ -15,13 +15,9 @@
*/
package org.springframework.data.elasticsearch.core.geo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.List;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.index.query.QueryBuilders;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -34,14 +30,24 @@ import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilde
import org.springframework.data.geo.Point;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import com.spatial4j.core.io.GeohashUtils;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
/**
* @author Rizwan Idrees
* @author Mohsin Husen
* @author Franck Marchand
* @author Artur Konczak
*
* Basic info:
* latitude - horizontal lines (equator = 0.0, values -90.0 to 90.0)
* longitude - vertical lines (Greenwich = 0.0, values -180 to 180)
* London [lat,lon] = [51.50985,-0.118082] - geohash = gcpvj3448
* Bounding Box for London = (bbox=-0.489,51.28,0.236,51.686)
* bbox = left,bottom,right,top
* bbox = min Longitude , min Latitude , max Longitude , max Latitude
*
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:elasticsearch-template-test.xml")
@ -53,8 +59,8 @@ public class ElasticsearchTemplateGeoTests {
private void loadClassBaseEntities() {
elasticsearchTemplate.deleteIndex(AuthorMarkerEntity.class);
elasticsearchTemplate.createIndex(AuthorMarkerEntity.class);
elasticsearchTemplate.refresh(AuthorMarkerEntity.class);
elasticsearchTemplate.putMapping(AuthorMarkerEntity.class);
elasticsearchTemplate.refresh(AuthorMarkerEntity.class);
List<IndexQuery> indexQueries = new ArrayList<>();
indexQueries.add(new AuthorMarkerEntityBuilder("1").name("Franck Marchand").location(45.7806d, 3.0875d).buildIndex());
@ -67,37 +73,38 @@ public class ElasticsearchTemplateGeoTests {
private void loadAnnotationBaseEntities() {
elasticsearchTemplate.deleteIndex(LocationMarkerEntity.class);
elasticsearchTemplate.createIndex(LocationMarkerEntity.class);
elasticsearchTemplate.refresh(LocationMarkerEntity.class);
elasticsearchTemplate.putMapping(LocationMarkerEntity.class);
elasticsearchTemplate.refresh(LocationMarkerEntity.class);
List<IndexQuery> indexQueries = new ArrayList<>();
double[] latLonArray = {0.100000, 51.000000};
String lonLatString = "51.000000, 0.100000";
String geohash = "u1044k2bd6u";
double[] lonLatArray = {0.100000, 51.000000};
String latLonString = "51.000000, 0.100000";
String geohash = "u10j46mkfekr";
GeoHashUtils.stringEncode(0.100000,51.000000);
LocationMarkerEntity location1 = LocationMarkerEntity.builder()
.id("1").name("Artur Konczak")
.locationAsString(lonLatString)
.locationAsArray(latLonArray)
.locationWithPrefixAsDistance(geohash)
.locationWithPrefixAsLengthOfGeoHash(geohash)
.locationAsString(latLonString)
.locationAsArray(lonLatArray)
.locationAsGeoHash(geohash)
.build();
LocationMarkerEntity location2 = LocationMarkerEntity.builder()
.id("2").name("Mohsin Husen")
.locationAsString(geohash.substring(0, 5))
.locationAsArray(latLonArray)
.locationAsString(geohash.substring(0, 8))
.locationAsArray(lonLatArray)
.locationAsGeoHash(geohash.substring(0, 8))
.build();
LocationMarkerEntity location3 = LocationMarkerEntity.builder()
.id("3").name("Rizwan Idrees")
.locationAsString(geohash)
.locationAsArray(latLonArray)
.locationWithPrefixAsLengthOfGeoHash(geohash)
.locationAsArray(lonLatArray)
.locationAsGeoHash(geohash)
.build();
indexQueries.add(buildIndex(location1));
indexQueries.add(buildIndex(location2));
indexQueries.add(buildIndex(location3));
elasticsearchTemplate.bulkIndex(indexQueries);
elasticsearchTemplate.refresh(AuthorMarkerEntity.class);
elasticsearchTemplate.refresh(LocationMarkerEntity.class);
}
@Test
@ -147,7 +154,7 @@ public class ElasticsearchTemplateGeoTests {
List<LocationMarkerEntity> geoAuthorsForGeoCriteria = elasticsearchTemplate.queryForList(geoLocationCriteriaQuery, LocationMarkerEntity.class);
//then
assertThat(geoAuthorsForGeoCriteria.size(), is(2));
assertThat(geoAuthorsForGeoCriteria.size(), is(1));
}
@Test
@ -193,7 +200,7 @@ public class ElasticsearchTemplateGeoTests {
public void shouldFindAllMarkersForNativeSearchQuery() {
//Given
loadAnnotationBaseEntities();
NativeSearchQueryBuilder queryBuilder = new NativeSearchQueryBuilder().withFilter(QueryBuilders.geoBoundingBoxQuery("locationAsArray").topLeft(52, -1).bottomRight(50, 1));
NativeSearchQueryBuilder queryBuilder = new NativeSearchQueryBuilder().withFilter(QueryBuilders.geoBoundingBoxQuery("locationAsArray").setCorners(52, -1, 50, 1));
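The 2.x topLeft/bottomRight pair collapses into one call; setCorners reads its four numeric arguments as (top, left, bottom, right), so the box above is unchanged:

// setCorners(top, left, bottom, right): top-left corner = (lat 52, lon -1), bottom-right corner = (lat 50, lon 1)
org.elasticsearch.index.query.GeoBoundingBoxQueryBuilder box =
        QueryBuilders.geoBoundingBoxQuery("locationAsArray").setCorners(52, -1, 50, 1);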
//When
List<LocationMarkerEntity> geoAuthorsForGeoCriteria = elasticsearchTemplate.queryForList(queryBuilder.build(), LocationMarkerEntity.class);
//Then
@ -223,7 +230,7 @@ public class ElasticsearchTemplateGeoTests {
//given
loadClassBaseEntities();
CriteriaQuery geoLocationCriteriaQuery3 = new CriteriaQuery(
new Criteria("location").boundedBy(GeohashUtils.encodeLatLon(53.5171d, 0), GeohashUtils.encodeLatLon(49.5171d, 0.2062d)));
new Criteria("location").boundedBy(GeoHashUtils.stringEncode(0, 53.5171d), GeoHashUtils.stringEncode(0.2062d, 49.5171d)));
//when
List<AuthorMarkerEntity> geoAuthorsForGeoCriteria3 = elasticsearchTemplate.queryForList(geoLocationCriteriaQuery3, AuthorMarkerEntity.class);
@ -270,34 +277,29 @@ public class ElasticsearchTemplateGeoTests {
public void shouldFindLocationWithGeoHashPrefix() {
//given
//geohash "u10j46mkfekr" - queried below by progressively longer prefixes: u, u1, u10, u10j, u10j4, ..., u10j46mkfek
loadAnnotationBaseEntities();
NativeSearchQueryBuilder locationWithPrefixAsDistancePrecision3 = new NativeSearchQueryBuilder().withFilter(QueryBuilders.geoHashCellQuery("box-one").field("locationWithPrefixAsDistance").geohash("u1044k2bd6u").precision(3));
NativeSearchQueryBuilder locationWithPrefixAsDistancePrecision4 = new NativeSearchQueryBuilder().withFilter(QueryBuilders.geoHashCellQuery("box-one").field("locationWithPrefixAsDistance").geohash("u1044k2bd6u").precision(4));
NativeSearchQueryBuilder locationWithPrefixAsDistancePrecision5 = new NativeSearchQueryBuilder().withFilter(QueryBuilders.geoHashCellQuery("box-one").field("locationWithPrefixAsDistance").geohash("u1044k2bd6u").precision(5));
NativeSearchQueryBuilder location1 = new NativeSearchQueryBuilder().withFilter(QueryBuilders.geoBoundingBoxQuery("locationAsGeoHash").setCorners("u"));
NativeSearchQueryBuilder location2 = new NativeSearchQueryBuilder().withFilter(QueryBuilders.geoBoundingBoxQuery("locationAsGeoHash").setCorners("u1"));
NativeSearchQueryBuilder location3 = new NativeSearchQueryBuilder().withFilter(QueryBuilders.geoBoundingBoxQuery("locationAsGeoHash").setCorners("u10"));
NativeSearchQueryBuilder location4 = new NativeSearchQueryBuilder().withFilter(QueryBuilders.geoBoundingBoxQuery("locationAsGeoHash").setCorners("u10j"));
NativeSearchQueryBuilder location5 = new NativeSearchQueryBuilder().withFilter(QueryBuilders.geoBoundingBoxQuery("locationAsGeoHash").setCorners("u10j4"));
NativeSearchQueryBuilder location11 = new NativeSearchQueryBuilder().withFilter(QueryBuilders.geoBoundingBoxQuery("locationAsGeoHash").setCorners("u10j46mkfek"));
NativeSearchQueryBuilder locationWithPrefixAsLengthOfGeoHashPrecision4 = new NativeSearchQueryBuilder().withFilter(QueryBuilders.geoHashCellQuery("box-one").field("locationWithPrefixAsLengthOfGeoHash").geohash("u1044k2bd6u").precision(4));
NativeSearchQueryBuilder locationWithPrefixAsLengthOfGeoHashPrecision5 = new NativeSearchQueryBuilder().withFilter(QueryBuilders.geoHashCellQuery("box-one").field("locationWithPrefixAsLengthOfGeoHash").geohash("u1044k2bd6u").precision(5));
NativeSearchQueryBuilder locationWithPrefixAsLengthOfGeoHashPrecision6 = new NativeSearchQueryBuilder().withFilter(QueryBuilders.geoHashCellQuery("box-one").field("locationWithPrefixAsLengthOfGeoHash").geohash("u1044k2bd6u").precision(6));
//when
List<LocationMarkerEntity> resultDistancePrecision3 = elasticsearchTemplate.queryForList(locationWithPrefixAsDistancePrecision3.build(), LocationMarkerEntity.class);
List<LocationMarkerEntity> resultDistancePrecision4 = elasticsearchTemplate.queryForList(locationWithPrefixAsDistancePrecision4.build(), LocationMarkerEntity.class);
List<LocationMarkerEntity> resultDistancePrecision5 = elasticsearchTemplate.queryForList(locationWithPrefixAsDistancePrecision5.build(), LocationMarkerEntity.class);
List<LocationMarkerEntity> resultGeoHashLengthPrecision4 = elasticsearchTemplate.queryForList(locationWithPrefixAsLengthOfGeoHashPrecision4.build(), LocationMarkerEntity.class);
List<LocationMarkerEntity> resultGeoHashLengthPrecision5 = elasticsearchTemplate.queryForList(locationWithPrefixAsLengthOfGeoHashPrecision5.build(), LocationMarkerEntity.class);
List<LocationMarkerEntity> resultGeoHashLengthPrecision6 = elasticsearchTemplate.queryForList(locationWithPrefixAsLengthOfGeoHashPrecision6.build(), LocationMarkerEntity.class);
List<LocationMarkerEntity> result1 = elasticsearchTemplate.queryForList(location1.build(), LocationMarkerEntity.class);
List<LocationMarkerEntity> result2 = elasticsearchTemplate.queryForList(location2.build(), LocationMarkerEntity.class);
List<LocationMarkerEntity> result3 = elasticsearchTemplate.queryForList(location3.build(), LocationMarkerEntity.class);
List<LocationMarkerEntity> result4 = elasticsearchTemplate.queryForList(location4.build(), LocationMarkerEntity.class);
List<LocationMarkerEntity> result5 = elasticsearchTemplate.queryForList(location5.build(), LocationMarkerEntity.class);
List<LocationMarkerEntity> result11 = elasticsearchTemplate.queryForList(location11.build(), LocationMarkerEntity.class);
//then
assertThat(resultDistancePrecision3.size(), is(1));
assertThat(resultDistancePrecision4.size(), is(1));
assertThat(resultDistancePrecision5.size(), is(0));
assertThat(resultGeoHashLengthPrecision4.size(), is(2));
assertThat(resultGeoHashLengthPrecision5.size(), is(2));
assertThat(resultGeoHashLengthPrecision6.size(), is(0));
assertThat(result1.size(), is(3));
assertThat(result2.size(), is(3));
assertThat(result3.size(), is(3));
assertThat(result4.size(), is(3));
assertThat(result5.size(), is(3));
assertThat(result11.size(), is(2));
}
private IndexQuery buildIndex(LocationMarkerEntity result) {

View File

@ -33,22 +33,19 @@ import org.springframework.data.elasticsearch.annotations.GeoPointField;
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Document(indexName = "test-geo-index", type = "geo-annotation-point-type", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-location-marker", type = "geo-annotation-point-type", shards = 1, replicas = 0, refreshInterval = "-1")
public class LocationMarkerEntity {
@Id
private String id;
private String name;
@Id
private String id;
private String name;
@GeoPointField
private String locationAsString;
@GeoPointField
private String locationAsString;
@GeoPointField
private double[] locationAsArray;
@GeoPointField
private double[] locationAsArray;
@GeoPointField(geoHashPrefix = true, geoHashPrecision = "100km")
private String locationWithPrefixAsDistance;
@GeoPointField(geoHashPrefix = true, geoHashPrecision = "5")
private String locationWithPrefixAsLengthOfGeoHash;
@GeoPointField
private String locationAsGeoHash;
}

View File

@ -49,6 +49,7 @@ public class CriteriaQueryTests {
public void before() {
elasticsearchTemplate.deleteIndex(SampleEntity.class);
elasticsearchTemplate.createIndex(SampleEntity.class);
elasticsearchTemplate.putMapping(SampleEntity.class);
elasticsearchTemplate.refresh(SampleEntity.class);
}
@ -715,7 +716,7 @@ public class CriteriaQueryTests {
// when
CriteriaQuery criteriaQuery = new CriteriaQuery(new Criteria("message").contains("a").or(new Criteria("message").contains("b")));
criteriaQuery.setMinScore(0.5F);
criteriaQuery.setMinScore(2.0F);
Page<SampleEntity> page = elasticsearchTemplate.queryForPage(criteriaQuery, SampleEntity.class);
// then
assertThat(page.getTotalElements(), is(1L));

View File

@ -19,7 +19,6 @@ import java.util.Date;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldIndex;
import org.springframework.data.elasticsearch.annotations.FieldType;
/**
@ -30,7 +29,7 @@ public class AbstractInheritedEntity {
@Id
private String id;
@Field(type = FieldType.Date, index = FieldIndex.not_analyzed)
@Field(type = FieldType.Date, index = false)
private Date createdDate;
public String getId() {

View File

@ -39,7 +39,7 @@ import org.springframework.data.elasticsearch.annotations.FieldType;
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Document(indexName = "book", type = "book", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-book", type = "book", shards = 1, replicas = 0, refreshInterval = "-1")
public class Book {
@Id

View File

@ -18,13 +18,15 @@ package org.springframework.data.elasticsearch.entities;
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Version;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;
/**
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@Document(indexName = "double-keyed-entity", type = "double-keyed-entity", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-double-keyed-entity", type = "double-keyed-entity", shards = 1, replicas = 0, refreshInterval = "-1")
public class DoubleIDEntity {
@Id

View File

@ -25,7 +25,7 @@ import org.springframework.data.elasticsearch.annotations.Setting;
*
* @author Mohsin Husen
*/
@Document(indexName = "test-setting-index", type = "test-setting-type")
@Document(indexName = "test-index-dynamic-setting-and-mapping", type = "test-setting-type")
@Setting(settingPath = "/settings/test-settings.json")
@Mapping(mappingPath = "/mappings/test-mappings.json")
public class DynamicSettingAndMappingEntity {

View File

@ -24,7 +24,7 @@ import org.springframework.data.elasticsearch.annotations.Mapping;
*
* @author Ted Liang
*/
@Document(indexName = "test-field-mapping-index", type = "test-field-mapping-type")
@Document(indexName = "test-index-field-dynamic-mapping", type = "test-field-mapping-type")
public class FieldDynamicMappingEntity {
@Id

View File

@ -23,7 +23,7 @@ import org.springframework.data.geo.Polygon;
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Document(indexName = "geo-test-index", type = "geo-test-index", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-geo", type = "geo-test-index", shards = 1, replicas = 0, refreshInterval = "-1")
public class GeoEntity {
@Id

View File

@ -26,7 +26,7 @@ import org.springframework.data.elasticsearch.annotations.FieldType;
/**
* Created by akonczak on 21/08/2016.
*/
@Document(indexName = "groups", type = "group")
@Document(indexName = "test-index-group", type = "group")
public class Group {
@Id

View File

@ -18,13 +18,15 @@ package org.springframework.data.elasticsearch.entities;
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Version;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;
/**
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@Document(indexName = "integer-keyed-entity", type = "integer-keyed-entity", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-integer-keyed-entity", type = "integer-keyed-entity", shards = 1, replicas = 0, refreshInterval = "-1")
public class IntegerIDEntity {
@Id

View File

@ -23,7 +23,7 @@ import org.springframework.data.elasticsearch.annotations.Document;
*
* @author Philipp Jardas
*/
@Document(indexName = "index", type = "type")
@Document(indexName = "test-index-minimal", type = "type")
public class MinimalEntity {
@Id

View File

@ -38,7 +38,7 @@ public class ParentEntity {
@Id
private String id;
@Field(type = FieldType.String, index = FieldIndex.analyzed, store = true)
@Field(type = FieldType.text, store = true)
private String name;
public ParentEntity() {
@ -67,10 +67,10 @@ public class ParentEntity {
@Id
private String id;
@Field(type = FieldType.String, store = true)
@Field(type = FieldType.text, store = true)
@Parent(type = PARENT_TYPE)
private String parentId;
@Field(type = FieldType.String, index = FieldIndex.analyzed, store = true)
@Field(type = FieldType.text, store = true)
private String name;
public ChildEntity() {

View File

@ -29,7 +29,7 @@ import org.springframework.data.elasticsearch.annotations.FieldType;
* @author Artur Konczak
*/
@Document(indexName = "person", type = "user", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-person", type = "user", shards = 1, replicas = 0, refreshInterval = "-1")
public class Person {
@Id

View File

@ -29,7 +29,7 @@ import org.springframework.data.elasticsearch.annotations.FieldType;
* @author Artur Konczak
*/
@Document(indexName = "person-multiple-level-nested", type = "user", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-person-multiple-level-nested", type = "user", shards = 1, replicas = 0, refreshInterval = "-1")
public class PersonMultipleLevelNested {
@Id

View File

@ -38,7 +38,7 @@ import org.springframework.data.elasticsearch.annotations.FieldType;
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Document(indexName = "test-product-index", type = "test-product-type", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-product", type = "test-product-type", shards = 1, replicas = 0, refreshInterval = "-1")
public class Product {
@Id

View File

@ -1,7 +1,7 @@
package org.springframework.data.elasticsearch.entities;
import static org.springframework.data.elasticsearch.annotations.FieldIndex.*;
import static org.springframework.data.elasticsearch.annotations.FieldType.*;
import static org.springframework.data.elasticsearch.annotations.FieldType.text;
import java.util.Date;
@ -13,13 +13,13 @@ import org.springframework.data.elasticsearch.annotations.Field;
/**
* @author Jakub Vavrik
*/
@Document(indexName = "test-datemapping", type = "mapping", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-date-mapping", type = "mapping", shards = 1, replicas = 0, refreshInterval = "-1")
public class SampleDateMappingEntity {
@Id
private String id;
@Field(type = String, index = not_analyzed, store = true, analyzer = "standard")
@Field(type = text, index = false, store = true, analyzer = "standard")
private String message;
@Field(type = Date, format = DateFormat.custom, pattern = "dd.MM.yyyy hh:mm")

View File

@ -15,19 +15,21 @@
*/
package org.springframework.data.elasticsearch.entities;
import java.lang.Double;
import java.lang.Long;
import java.lang.Object;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Version;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;
import org.springframework.data.elasticsearch.annotations.ScriptedField;
import org.springframework.data.elasticsearch.core.geo.GeoPoint;
import static org.springframework.data.elasticsearch.annotations.FieldType.*;
/**
* @author Rizwan Idrees
@ -39,17 +41,18 @@ import org.springframework.data.elasticsearch.core.geo.GeoPoint;
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Document(indexName = "test-index", type = "test-type", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-sample", type = "test-type", shards = 1, replicas = 0, refreshInterval = "-1")
public class SampleEntity {
@Id
private String id;
@Field(type = text, store = true, fielddata = true)
private String type;
@Field(type = FieldType.String)
@Field(type = text, store = true, fielddata = true)
private String message;
private int rate;
@ScriptedField
private Long scriptedRate;
private Double scriptedRate;
private boolean available;
private String highlightedMessage;

View File

@ -26,6 +26,8 @@ import java.util.UUID;
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Version;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;
import org.springframework.data.elasticsearch.annotations.ScriptedField;
import org.springframework.data.elasticsearch.core.geo.GeoPoint;
@ -46,6 +48,7 @@ public class SampleEntityUUIDKeyed {
@Id
private UUID id;
private String type;
@Field(type = FieldType.text, fielddata = true)
private String message;
private int rate;
@ScriptedField

View File

@ -15,8 +15,7 @@
*/
package org.springframework.data.elasticsearch.entities;
import static org.springframework.data.elasticsearch.annotations.FieldIndex.*;
import static org.springframework.data.elasticsearch.annotations.FieldType.*;
import static org.springframework.data.elasticsearch.annotations.FieldType.text;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
@ -24,10 +23,10 @@ import org.springframework.data.elasticsearch.annotations.Field;
/**
* @author Kevin Leturc
*/
@Document(indexName = "test-inherited-mapping", type = "mapping", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-sample-inherited", type = "mapping", shards = 1, replicas = 0, refreshInterval = "-1")
public class SampleInheritedEntity extends AbstractInheritedEntity {
@Field(type = String, index = not_analyzed, store = true, analyzer = "standard")
@Field(type = text, index = false, store = true, analyzer = "standard")
private String message;
public String getMessage() {

View File

@ -15,8 +15,7 @@
*/
package org.springframework.data.elasticsearch.entities;
import static org.springframework.data.elasticsearch.annotations.FieldIndex.*;
import static org.springframework.data.elasticsearch.annotations.FieldType.*;
import static org.springframework.data.elasticsearch.annotations.FieldType.text;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;
@ -26,13 +25,13 @@ import org.springframework.data.elasticsearch.annotations.Field;
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@Document(indexName = "test-mapping", type = "mapping", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-sample-mapping", type = "mapping", shards = 1, replicas = 0, refreshInterval = "-1")
public class SampleMappingEntity {
@Id
private String id;
@Field(type = String, index = not_analyzed, store = true, analyzer = "standard")
@Field(type = text, index = false, store = true, analyzer = "standard")
private String message;
private NestedEntity nested;
@ -55,7 +54,7 @@ public class SampleMappingEntity {
static class NestedEntity {
@Field(type = String)
@Field(type = text)
private String someField;
public String getSomeField() {

View File

@ -15,8 +15,7 @@
*/
package org.springframework.data.elasticsearch.entities;
import static org.springframework.data.elasticsearch.annotations.FieldIndex.*;
import static org.springframework.data.elasticsearch.annotations.FieldType.*;
import static org.springframework.data.elasticsearch.annotations.FieldType.text;
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Transient;
@ -26,13 +25,13 @@ import org.springframework.data.elasticsearch.annotations.Field;
/**
* @author Jakub Vavrik
*/
@Document(indexName = "test-recursive-mapping", type = "mapping", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-recursive-mapping", type = "mapping", shards = 1, replicas = 0, refreshInterval = "-1")
public class SampleTransientEntity {
@Id
private String id;
@Field(type = String, index = not_analyzed, store = true, analyzer = "standard")
@Field(type = text, index = false, store = true, analyzer = "standard")
private String message;
@Transient

View File

@ -24,7 +24,7 @@ import org.springframework.data.elasticsearch.annotations.FieldType;
* @author Stuart Stevenson
* @author Mohsin Husen
*/
@Document(indexName = "circular-objects", type = "circular-object", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-simple-recursive", type = "circular-object", shards = 1, replicas = 0, refreshInterval = "-1")
public class SimpleRecursiveEntity {
@Id

View File

@ -23,7 +23,7 @@ import org.springframework.data.elasticsearch.annotations.Document;
*
* @author Artur Konczak
*/
@Document(indexName = "#{'abz'+'-'+'entity'}", type = "#{'my'+'Type'}", shards = 1,
@Document(indexName = "#{'test-index-abz'+'-'+'entity'}", type = "#{'my'+'Type'}", shards = 1,
replicas = 0, refreshInterval = "-1")
public class SpELEntity {

View File

@ -37,7 +37,7 @@ import org.springframework.data.elasticsearch.annotations.FieldType;
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Document(indexName = "stock", type = "price", shards = 1, replicas = 0, refreshInterval = "-1")
@Document(indexName = "test-index-stock", type = "price", shards = 1, replicas = 0, refreshInterval = "-1")
public class StockPrice {
@Id

View File

@ -25,7 +25,7 @@ import org.springframework.data.elasticsearch.annotations.Setting;
*
* @author Mohsin Husen
*/
@Document(indexName = "synonym-index", type = "synonym-type")
@Document(indexName = "test-index-synonym", type = "synonym-type")
@Setting(settingPath = "/synonyms/settings.json")
@Mapping(mappingPath = "/synonyms/mappings.json")
public class SynonymEntity {

View File

@ -26,7 +26,7 @@ import org.springframework.data.elasticsearch.annotations.FieldType;
/**
* Created by akonczak on 21/08/2016.
*/
@Document(indexName = "users", type = "user")
@Document(indexName = "test-index-user", type = "user")
public class User {
@Id
private String id;

View File

@ -24,7 +24,7 @@ import org.springframework.data.elasticsearch.annotations.Document;
* @author Young Gu
* @author Oliver Gierke
*/
@Document(indexName = "test-index")
@Document(indexName = "test-index-immutable")
@NoArgsConstructor(force = true)
@Getter
public class ImmutableEntity {

View File

@ -58,6 +58,7 @@ public class CdiRepositoryTests {
CdiRepositoryClient client = cdiContainer.getInstance(CdiRepositoryClient.class);
repository = client.getRepository();
personRepository = client.getSamplePersonRepository();
repository.deleteAll();
qualifiedProductRepository = client.getQualifiedProductRepository();
}

View File

@ -19,7 +19,8 @@ import javax.annotation.PreDestroy;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.Produces;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.client.Client;
import org.elasticsearch.node.NodeValidationException;
import org.springframework.data.elasticsearch.Utils;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
@ -31,20 +32,20 @@ import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
class ElasticsearchTemplateProducer {
@Produces
public NodeClient createNodeClient() {
public Client createNodeClient() throws NodeValidationException {
return Utils.getNodeClient();
}
@Produces
public ElasticsearchOperations createElasticsearchTemplate(NodeClient nodeClient) {
return new ElasticsearchTemplate(nodeClient);
public ElasticsearchOperations createElasticsearchTemplate(Client client) {
return new ElasticsearchTemplate(client);
}
@Produces
@OtherQualifier
@PersonDB
public ElasticsearchOperations createQualifiedElasticsearchTemplate(NodeClient nodeClient) {
return new ElasticsearchTemplate(nodeClient);
public ElasticsearchOperations createQualifiedElasticsearchTemplate(Client client) {
return new ElasticsearchTemplate(client);
}
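Since the producer now only promises a plain org.elasticsearch.client.Client, it could equally hand out a 5.x transport client; a hypothetical sketch (host, port and the producer class name are assumptions, not part of this commit):

import java.net.InetAddress;
import javax.enterprise.inject.Produces;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.transport.client.PreBuiltTransportClient;

class TransportClientProducer {

    @Produces
    public Client createTransportClient() throws Exception {
        // connects to an external Elasticsearch 5.x node on the default transport port
        return new PreBuiltTransportClient(Settings.EMPTY)
                .addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("localhost"), 9300));
    }
}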
@PreDestroy

View File

@ -9,7 +9,7 @@ import org.springframework.data.elasticsearch.annotations.Document;
* @author Mason Chan
*/
@Document(indexName = "test-index", type = "test-type", createIndex = false)
@Document(indexName = "test-index-not-create", type = "test-type", createIndex = false)
public class CreateIndexFalseEntity {
@Id
private String id;

View File

@ -120,7 +120,7 @@ public class DynamicSettingAndMappingEntityRepositoryTests {
Map properties = (Map) mapping.get("properties");
assertThat(mapping, is(notNullValue()));
assertThat(properties, is(notNullValue()));
assertThat(((String) ((Map) properties.get("email")).get("type")), is("string"));
assertThat(((String) ((Map) properties.get("email")).get("type")), is("text"));
assertThat((String) ((Map) properties.get("email")).get("analyzer"), is("emailAnalyzer"));
}
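The expected type changes from "string" to "text" because 5.x splits string mappings into text and keyword. Condensed, and assuming the mapping Map here comes from ElasticsearchTemplate.getMapping (an assumption, the fetch itself is outside this hunk), the check is:

Map mapping = elasticsearchTemplate.getMapping(DynamicSettingAndMappingEntity.class);
Map properties = (Map) mapping.get("properties");
String emailType = (String) ((Map) properties.get("email")).get("type");
assertThat(emailType, is("text"));   // was "string" before the 5.x upgrade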
@ -145,7 +145,7 @@ public class DynamicSettingAndMappingEntityRepositoryTests {
Map properties = (Map) mapping.get("properties");
assertThat(mapping, is(notNullValue()));
assertThat(properties, is(notNullValue()));
assertThat(((String) ((Map) properties.get("email")).get("type")), is("string"));
assertThat(((String) ((Map) properties.get("email")).get("type")), is("text"));
assertThat((String) ((Map) properties.get("email")).get("analyzer"), is("emailAnalyzer"));
}
@ -161,7 +161,7 @@ public class DynamicSettingAndMappingEntityRepositoryTests {
Map properties = (Map) mapping.get("properties");
assertThat(mapping, is(notNullValue()));
assertThat(properties, is(notNullValue()));
assertThat(((String) ((Map) properties.get("email")).get("type")), is("string"));
assertThat(((String) ((Map) properties.get("email")).get("type")), is("text"));
assertThat((String) ((Map) properties.get("email")).get("analyzer"), is("emailAnalyzer"));
}
}

View File

@ -69,7 +69,7 @@ public class FieldDynamicMappingEntityRepositoryTests {
assertThat(properties.containsKey("file"), is(true));
Map file = (Map) properties.get("file");
assertThat(file, is(notNullValue()));
assertThat(((String) file.get("type")), is("string"));
assertThat(((String) file.get("type")), is("text"));
assertThat(file.containsKey("fields"), is(true));
Map fields = (Map) file.get("fields");
@ -79,7 +79,7 @@ public class FieldDynamicMappingEntityRepositoryTests {
Map content = (Map) fields.get("content");
assertThat(content, is(notNullValue()));
assertThat((String)content.get("type"), is("string"));
assertThat((String)content.get("type"), is("text"));
assertThat((String)content.get("term_vector"), is("with_positions_offsets"));
assertThat((Boolean)content.get("store"), is(Boolean.TRUE));
}

View File

@ -44,6 +44,7 @@ public class ComplexCustomMethodRepositoryManualWiringTests {
public void before() {
elasticsearchTemplate.deleteIndex(SampleEntity.class);
elasticsearchTemplate.createIndex(SampleEntity.class);
elasticsearchTemplate.putMapping(SampleEntity.class);
elasticsearchTemplate.refresh(SampleEntity.class);
}

View File

@ -50,6 +50,7 @@ public class DoubleIDRepositoryTests {
public void before() {
elasticsearchTemplate.deleteIndex(DoubleIDEntity.class);
elasticsearchTemplate.createIndex(DoubleIDEntity.class);
elasticsearchTemplate.putMapping(DoubleIDEntity.class);
elasticsearchTemplate.refresh(DoubleIDEntity.class);
}

View File

@ -50,6 +50,7 @@ public class IntegerIDRepositoryTests {
public void before() {
elasticsearchTemplate.deleteIndex(IntegerIDEntity.class);
elasticsearchTemplate.createIndex(IntegerIDEntity.class);
elasticsearchTemplate.putMapping(IntegerIDEntity.class);
elasticsearchTemplate.refresh(IntegerIDEntity.class);
}
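
The same bootstrap change recurs in the three repository tests above. A commented sketch of the resulting @Before method (shown for IntegerIDEntity; the other classes follow the same pattern), under the assumption that the explicit putMapping call is what applies the annotation-derived mapping before any documents are indexed:

@Before
public void before() {
	elasticsearchTemplate.deleteIndex(IntegerIDEntity.class);  // drop any index left over from a previous run
	elasticsearchTemplate.createIndex(IntegerIDEntity.class);  // recreate the index with the entity's settings
	elasticsearchTemplate.putMapping(IntegerIDEntity.class);   // apply the @Field-derived mapping explicitly
	elasticsearchTemplate.refresh(IntegerIDEntity.class);      // make the (empty) index immediately searchable
}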

View File

@ -15,16 +15,12 @@
*/
package org.springframework.data.elasticsearch.repository.support;
import static org.apache.commons.lang.RandomStringUtils.*;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.common.util.CollectionUtils;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -32,6 +28,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Order;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.elasticsearch.core.query.SearchQuery;
@ -39,8 +36,11 @@ import org.springframework.data.elasticsearch.entities.SampleEntity;
import org.springframework.data.elasticsearch.repositories.sample.SampleElasticsearchRepository;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import com.google.common.collect.Lists;
import static org.apache.commons.lang.RandomStringUtils.*;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import static org.springframework.data.domain.Sort.Direction.*;
/**
* @author Rizwan Idrees
@ -60,6 +60,7 @@ public class SimpleElasticsearchRepositoryTests {
public void before() {
elasticsearchTemplate.deleteIndex(SampleEntity.class);
elasticsearchTemplate.createIndex(SampleEntity.class);
elasticsearchTemplate.putMapping(SampleEntity.class);
elasticsearchTemplate.refresh(SampleEntity.class);
}
@ -103,8 +104,8 @@ public class SimpleElasticsearchRepositoryTests {
assertThat(entityFromElasticSearch.isPresent(), is(true));
}
@Test
public void shouldSaveDocumentWithoutId() {
@Test(expected = ActionRequestValidationException.class)
public void throwExceptionWhenTryingToInsertWithVersionButWithoutId() {
// given
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setMessage("some message");
@ -229,7 +230,7 @@ public class SimpleElasticsearchRepositoryTests {
// then
assertNotNull("sample entities cant be null..", sampleEntities);
List<SampleEntity> entities = Lists.newArrayList(sampleEntities);
List<SampleEntity> entities = CollectionUtils.iterableAsArrayList(sampleEntities);
assertThat(entities.size(), is(2));
}
@ -508,7 +509,7 @@ public class SimpleElasticsearchRepositoryTests {
sampleEntity2.setMessage("hello");
repository.save(sampleEntity2);
// when
Iterable<SampleEntity> sampleEntities = repository.findAll(new Sort(new Sort.Order(Sort.Direction.ASC, "message")));
Iterable<SampleEntity> sampleEntities = repository.findAll(new Sort(new Order(ASC, "message")));
// then
assertThat(sampleEntities, is(notNullValue()));
}
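
The renamed test above reflects a 5.x behaviour change: an index request that carries a version but no id fails request validation instead of having an id auto-generated. A sketch of how the complete test body plausibly looks inside SimpleElasticsearchRepositoryTests, reusing its repository field; the lines beyond those shown in the diff are assumptions:

@Test(expected = ActionRequestValidationException.class)
public void throwExceptionWhenTryingToInsertWithVersionButWithoutId() {
	// given: an entity with a version value but no id
	SampleEntity sampleEntity = new SampleEntity();
	sampleEntity.setMessage("some message");
	sampleEntity.setVersion(System.currentTimeMillis());

	// when: saving issues an index request that the client rejects during validation
	repository.save(sampleEntity);

	// then: ActionRequestValidationException is thrown before anything reaches the cluster
}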

View File

@ -15,17 +15,12 @@
*/
package org.springframework.data.elasticsearch.repository.support;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -40,6 +35,9 @@ import org.springframework.data.elasticsearch.entities.SampleEntityUUIDKeyed;
import org.springframework.data.elasticsearch.repositories.sample.SampleUUIDKeyedElasticsearchRepository;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
/**
* @author Gad Akuka
@ -243,7 +241,7 @@ public class UUIDElasticsearchRepositoryTests {
// when
repository.saveAll(sampleEntities);
// then
Page<SampleEntityUUIDKeyed> entities = repository.search(termQuery("id", documentId), new PageRequest(0, 50));
Page<SampleEntityUUIDKeyed> entities = repository.search(termQuery("id", documentId.toString()), new PageRequest(0, 50));
assertNotNull(entities);
}
@ -286,7 +284,7 @@ public class UUIDElasticsearchRepositoryTests {
// when
long result = repository.deleteSampleEntityUUIDKeyedById(documentId);
// then
SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(termQuery("id", documentId)).build();
SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(termQuery("id", documentId.toString())).build();
Page<SampleEntityUUIDKeyed> sampleEntities = repository.search(searchQuery);
assertThat(sampleEntities.getTotalElements(), equalTo(0L));
assertThat(result, equalTo(1L));
@ -400,7 +398,7 @@ public class UUIDElasticsearchRepositoryTests {
repository.delete(sampleEntityUUIDKeyed);
repository.refresh();
// then
SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(termQuery("id", documentId)).build();
SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(termQuery("id", documentId.toString())).build();
Page<SampleEntityUUIDKeyed> sampleEntities = repository.search(searchQuery);
assertThat(sampleEntities.getTotalElements(), equalTo(0L));
}
@ -423,7 +421,7 @@ public class UUIDElasticsearchRepositoryTests {
repository.save(sampleEntityUUIDKeyed2);
// when
Iterable<SampleEntityUUIDKeyed> sampleEntities = repository.search(termQuery("id", documentId1));
Iterable<SampleEntityUUIDKeyed> sampleEntities = repository.search(termQuery("id", documentId1.toString()));
// then
assertNotNull("sample entities cant be null..", sampleEntities);
}
@ -454,7 +452,6 @@ public class UUIDElasticsearchRepositoryTests {
@Test
public void shouldSortByGivenField() {
// todo
// given
UUID documentId = UUID.randomUUID();
SampleEntityUUIDKeyed sampleEntityUUIDKeyed = new SampleEntityUUIDKeyed();
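
The toString() changes above all follow one pattern: with the 5.x term query, the UUID key is passed as its string representation. A fragment illustrating the pattern, meant to sit inside the test class above and relying on its existing imports, repository field and fixture setters:

UUID documentId = UUID.randomUUID();
SampleEntityUUIDKeyed entity = new SampleEntityUUIDKeyed();
entity.setId(documentId);
entity.setMessage("hello world");
repository.save(entity);

// pass the UUID as a string when querying the id field
SearchQuery searchQuery = new NativeSearchQueryBuilder()
		.withQuery(termQuery("id", documentId.toString()))
		.build();
Page<SampleEntityUUIDKeyed> result = repository.search(searchQuery);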

View File

@ -1,8 +1,8 @@
{
"type": "string",
"type": "text",
"fields": {
"content": {
"type": "string",
"type": "text",
"term_vector":"with_positions_offsets",
"store": true
}
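
The JSON above is a raw multi-field mapping: a "text" main field plus a stored "content" sub-field with term vectors. A minimal sketch of attaching such a file to a single property through a field-level @Mapping; the class name and mapping path here are hypothetical:

import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Mapping;

@Document(indexName = "test-index-field-mapping", type = "test-type")
public class FieldMappingEntity {

	@Id
	private String id;

	// the JSON mapping above is applied to this property instead of being derived from @Field
	@Mapping(mappingPath = "/mappings/test-field-analyzed-mappings.json")
	private String file;

	// getters and setters omitted for brevity
}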

View File

@ -1,3 +1,3 @@
#enabled scripts - this require groovy
script.inline: true
script.indexed: true
#node.max_local_storage_nodes: 100

View File

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:elasticsearch="http://www.springframework.org/schema/data/elasticsearch"
xsi:schemaLocation="http://www.springframework.org/schema/data/elasticsearch http://www.springframework.org/schema/data/elasticsearch/spring-elasticsearch-1.0.xsd
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd">
<import resource="infrastructure.xml"/>
<bean name="elasticsearchTemplate"
class="org.springframework.data.elasticsearch.core.ElasticsearchTemplate">
<constructor-arg name="client" ref="client"/>
</bean>
<elasticsearch:repositories base-package="org.springframework.data.elasticsearch.repositories.book"/>
</beans>
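
For comparison, a JavaConfig sketch roughly equivalent to the XML context above, assuming the same base package and an infrastructure.xml that still provides the client bean; the configuration class name is illustrative:

import org.elasticsearch.client.Client;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.ImportResource;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
import org.springframework.data.elasticsearch.repository.config.EnableElasticsearchRepositories;

@Configuration
@ImportResource("classpath:/infrastructure.xml") // provides the "client" bean
@EnableElasticsearchRepositories(basePackages = "org.springframework.data.elasticsearch.repositories.book")
public class BookRepositoryConfiguration {

	@Bean
	public ElasticsearchOperations elasticsearchTemplate(Client client) {
		return new ElasticsearchTemplate(client);
	}
}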

Some files were not shown because too many files have changed in this diff.