diff --git a/README.md b/README.md
index 9da07b465..2eb873e7b 100644
--- a/README.md
+++ b/README.md
@@ -133,7 +133,7 @@ Searching entities using Elasticsearch Template
Indexing a single document with Repository
```java
- @Resource
+ @Autowired
private SampleElasticsearchRepository repository;
String documentId = "123456";
@@ -147,7 +147,7 @@ Indexing a single document with Repository
Indexing multiple Document(bulk index) using Repository
```java
- @Resource
+ @Autowired
private SampleElasticsearchRepository repository;
String documentId = "123456";
@@ -371,6 +371,11 @@ Here are some ways for you to get involved in the community:
Before we accept a non-trivial patch or pull request we will need you to sign the [contributor's agreement](https://support.springsource.com/spring_committer_signup). Signing the contributor's agreement does not grant anyone commit rights to the main repository, but it does mean that we can accept your contributions, and you will get an author credit if we do. Active contributors might be asked to join the core team, and given the ability to merge pull requests.
+
+Code formatting for [Eclipse and Intellij](https://github.com/spring-projects/spring-data-build/tree/master/etc/ide)
+
+[More information about contributing to Spring Data](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.md)
+
### Contact Details
* Rizwan Idrees (rizwan.idrees@biomedcentral.com)
diff --git a/formatting.xml b/formatting.xml
deleted file mode 100644
index b60c70a4e..000000000
--- a/formatting.xml
+++ /dev/null
@@ -1,820 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index cff16b8af..ced57a14f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,5 +1,6 @@
-
+4.0.0org.springframework.data
@@ -152,13 +153,13 @@
-
- spring-plugins-release
- http://repo.spring.io/plugins-release
-
-
+
+ spring-plugins-release
+ http://repo.spring.io/plugins-release
+
+
-
+ https://github.com/SpringSource/spring-data-elasticsearchscm:git:git://github.com/SpringSource/spring-data-elasticsearch.gitscm:git:ssh://git@github.com:SpringSource/spring-data-elasticsearch.git
diff --git a/src/docbkx/index.xml b/src/docbkx/index.xml
index 05f60dde7..d5aff1fcd 100644
--- a/src/docbkx/index.xml
+++ b/src/docbkx/index.xml
@@ -1,62 +1,62 @@
+ "http://www.oasis-open.org/docbook/xml/4.4/docbookx.dtd">
-
- Spring Data Elasticsearch
-
-
- BioMed Central
- Development Team
-
-
-
-
- Copies of this document may be made for your own use and for
- distribution to others, provided that you do not
- charge any fee for
- such copies and further provided that each copy
- contains this
- Copyright Notice, whether
- distributed in print or electronically.
-
-
+
+ Spring Data Elasticsearch
+
+
+ BioMed Central
+ Development Team
+
+
+
+
+ Copies of this document may be made for your own use and for
+ distribution to others, provided that you do not
+ charge any fee for
+ such copies and further provided that each copy
+ contains this
+ Copyright Notice, whether
+ distributed in print or electronically.
+
+
-
- 2013
- The original author(s)
-
-
+
+ 2013
+ The original author(s)
+
+
-
+
-
+
-
- Reference Documentation
+
+ Reference Documentation
-
-
-
+
+
+
-
-
-
+
+
+
-
- Appendix
-
-
-
-
-
-
-
+
+ Appendix
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/docbkx/preface.xml b/src/docbkx/preface.xml
index 2872ded1c..a45737445 100644
--- a/src/docbkx/preface.xml
+++ b/src/docbkx/preface.xml
@@ -1,39 +1,40 @@
+ "http://www.oasis-open.org/docbook/xml/4.4/docbookx.dtd">
- Preface
- The Spring Data Elasticsearch project applies core Spring
- concepts to
- the
- development of solutions using the Elasticsearch Search
- Engine.
- We have povided a "template" as a high-level abstraction for
- storing,querying,sorting and faceting documents. You will notice
- similarities
- to the Spring data solr and
- mongodb support in the Spring Framework.
-
-
- Project Metadata
-
-
-
- Version Control -
- git://github.com/BioMedCentralLtd/spring-data-elasticsearch.git
-
-
-
-
-
-
- Requirements
-
- Requires
- Elasticsearch
- 0.20.2 and above or optional dependency or not even that if you are
- using Embedded Node Client
-
-
+ Preface
+ The Spring Data Elasticsearch project applies core Spring
+ concepts to
+ the
+ development of solutions using the Elasticsearch Search
+ Engine.
+ We have povided a "template" as a high-level abstraction for
+ storing,querying,sorting and faceting documents. You will notice
+ similarities
+ to the Spring data solr and
+ mongodb support in the Spring Framework.
+
+
+ Project Metadata
+
+
+
+ Version Control -
+
+ git://github.com/BioMedCentralLtd/spring-data-elasticsearch.git
+
+
+
+
+
+
+ Requirements
+
+ Requires
+ Elasticsearch
+ 0.20.2 and above or optional dependency or not even that if you are
+ using Embedded Node Client
+
+
\ No newline at end of file
diff --git a/src/docbkx/reference/data-elasticsearch.xml b/src/docbkx/reference/data-elasticsearch.xml
index ee3460570..f983dd7e9 100644
--- a/src/docbkx/reference/data-elasticsearch.xml
+++ b/src/docbkx/reference/data-elasticsearch.xml
@@ -1,487 +1,508 @@
+ "http://www.oasis-open.org/docbook/xml/4.4/docbookx.dtd">
- Elasticsearch Repositories
-
- This chapter includes details of the Elasticsearch repository
- implementation.
-
-
-
- Introduction
+ Elasticsearch Repositories
+
+ This chapter includes details of the Elasticsearch repository
+ implementation.
+
+
+
+ Introduction
-
- Spring Namespace
+
+ Spring Namespace
-
- The Spring Data Elasticsearch module contains a custom namespace
- allowing
- definition of repository beans as well as elements for
- instantiating
- a
- ElasticsearchServer
- .
-
+
+ The Spring Data Elasticsearch module contains a custom namespace
+ allowing
+ definition of repository beans as well as elements for
+ instantiating
+ a
+ ElasticsearchServer
+ .
+
-
- Using the
- repositories
- element looks up Spring Data repositories as described in
-
- .
-
+
+ Using the
+ repositories
+ element looks up Spring Data repositories as described in
+
+ .
+
-
- Setting up Elasticsearch repositories using Namespace
- <?xml version="1.0" encoding="UTF-8"?>
-<beans xmlns="http://www.springframework.org/schema/beans"
-xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-xmlns:elasticsearch="http://www.springframework.org/schema/data/elasticsearch"
-xsi:schemaLocation="http://www.springframework.org/schema/beans
-http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
-http://www.springframework.org/schema/data/elasticsearch
-http://www.springframework.org/schema/data/elasticsearch/spring-elasticsearch-1.0.xsd">
+
+ Setting up Elasticsearch repositories using Namespace
+ <?xml version="1.0" encoding="UTF-8"?>
+ <beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:elasticsearch="http://www.springframework.org/schema/data/elasticsearch"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans
+ http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
+ http://www.springframework.org/schema/data/elasticsearch
+ http://www.springframework.org/schema/data/elasticsearch/spring-elasticsearch-1.0.xsd">
-<elasticsearch:repositories base-package="com.acme.repositories" />
-</beans>
-
+ <elasticsearch:repositories base-package="com.acme.repositories" />
+ </beans>
+
-
- Using the
- Transport Client
- or
- Node Client
- element registers an instance of
- Elasticsearch Server
- in the context.
+
+ Using the
+ Transport Client
+ or
+ Node Client
+ element registers an instance of
+ Elasticsearch Server
+ in the context.
-
- Transport Client using Namespace
- <?xml version="1.0" encoding="UTF-8"?>
-<beans xmlns="http://www.springframework.org/schema/beans"
-xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-xmlns:elasticsearch="http://www.springframework.org/schema/data/elasticsearch"
-xsi:schemaLocation="http://www.springframework.org/schema/beans
-http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
-http://www.springframework.org/schema/data/elasticsearch
-http://www.springframework.org/schema/data/elasticsearch/spring-elasticsearch-1.0.xsd">
+
+ Transport Client using Namespace
+ <?xml version="1.0" encoding="UTF-8"?>
+ <beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:elasticsearch="http://www.springframework.org/schema/data/elasticsearch"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans
+ http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
+ http://www.springframework.org/schema/data/elasticsearch
+ http://www.springframework.org/schema/data/elasticsearch/spring-elasticsearch-1.0.xsd">
-<elasticsearch:transport-client id="client" cluster-nodes="localhost:9300,someip:9300" />
-</beans>
-
+ <elasticsearch:transport-client id="client" cluster-nodes="localhost:9300,someip:9300" />
+ </beans>
+
-
- Node Client using Namespace
- <?xml version="1.0" encoding="UTF-8"?>
-<beans xmlns="http://www.springframework.org/schema/beans"
-xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-xmlns:elasticsearch="http://www.springframework.org/schema/data/elasticsearch"
-xsi:schemaLocation="http://www.springframework.org/schema/beans
-http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
-http://www.springframework.org/schema/data/elasticsearch
-http://www.springframework.org/schema/data/elasticsearch/spring-elasticsearch-1.0.xsd">
+
+ Node Client using Namespace
+ <?xml version="1.0" encoding="UTF-8"?>
+ <beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:elasticsearch="http://www.springframework.org/schema/data/elasticsearch"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans
+ http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
+ http://www.springframework.org/schema/data/elasticsearch
+ http://www.springframework.org/schema/data/elasticsearch/spring-elasticsearch-1.0.xsd">
-<elasticsearch:node-client id="client" local="true"" />
-</beans>
-
-
-
-
- Annotation based configuration
- The Spring Data Elasticsearch repositories support cannot only
- be
- activated through an XML namespace but also using an annotation
- through JavaConfig.
-
-
- Spring Data Elasticsearch repositories using JavaConfig
-
-
-@Configuration
-@EnableElasticsearchRepositories(basePackages = "org/springframework/data/elasticsearch/repositories")
-static class Config {
+ <elasticsearch:node-client id="client" local="true"" />
+ </beans>
+
+
+
+
+ Annotation based configuration
+ The Spring Data Elasticsearch repositories support cannot only
+ be
+ activated through an XML namespace but also using an annotation
+ through JavaConfig.
+
+
+ Spring Data Elasticsearch repositories using JavaConfig
+
+
+ @Configuration
+ @EnableElasticsearchRepositories(basePackages =
+ "org/springframework/data/elasticsearch/repositories")
+ static class Config {
-@Bean
-public ElasticsearchOperations elasticsearchTemplate() {
-return new ElasticsearchTemplate(nodeBuilder().local(true).node().client());
- }
-}
-
- The configuration above sets up an
- Embedded Elasticsearch Server
- which is used by the
- ElasticsearchTemplate
- . Spring Data Elasticsearch Repositories are activated using the
- @EnableElasticsearchRepositories
- annotation, which
- essentially carries the same attributes as the XML
- namespace does. If no
- base package is configured, it will use the
- one
- the configuration class
- resides in.
-
-
-
-
- Elasticsearch Repositores using CDI
- The Spring Data Elasticsearch repositories can also be set up
- using CDI
- functionality.
-
-
- Spring Data Elasticsearch repositories using JavaConfig
-
- class ElasticsearchTemplateProducer {
+ @Bean
+ public ElasticsearchOperations elasticsearchTemplate() {
+ return new ElasticsearchTemplate(nodeBuilder().local(true).node().client());
+ }
+ }
+
+
+ The configuration above sets up an
+ Embedded Elasticsearch Server
+ which is used by the
+ ElasticsearchTemplate
+ . Spring Data Elasticsearch Repositories are activated using the
+ @EnableElasticsearchRepositories
+ annotation, which
+ essentially carries the same attributes as the XML
+ namespace does. If no
+ base package is configured, it will use the
+ one
+ the configuration class
+ resides in.
+
+
+
+
+ Elasticsearch Repositores using CDI
+ The Spring Data Elasticsearch repositories can also be set up
+ using CDI
+ functionality.
+
+
+ Spring Data Elasticsearch repositories using JavaConfig
+
+ class ElasticsearchTemplateProducer {
-@Produces
-@ApplicationScoped
-public ElasticsearchOperations createElasticsearchTemplate() {
- return new ElasticsearchTemplate(nodeBuilder().local(true).node().client());
- }
-}
+ @Produces
+ @ApplicationScoped
+ public ElasticsearchOperations createElasticsearchTemplate() {
+ return new ElasticsearchTemplate(nodeBuilder().local(true).node().client());
+ }
+ }
-class ProductService {
+ class ProductService {
-private ProductRepository repository;
+ private ProductRepository repository;
-public Page<Product> findAvailableBookByName(String name, Pageable pageable) {
- return repository.findByAvailableTrueAndNameStartingWith(name, pageable);
-}
+ public Page<Product> findAvailableBookByName(String name, Pageable pageable) {
+ return repository.findByAvailableTrueAndNameStartingWith(name, pageable);
+ }
-@Inject
-public void setRepository(ProductRepository repository) {
- this.repository = repository;
- }
-}
-
-
-
-
- Query methods
-
- Query lookup strategies
-
- The Elasticsearch module supports all basic query building
- feature as String,Abstract,Criteria or
- have
- it being derived from the
- method name.
-
+ @Inject
+ public void setRepository(ProductRepository repository) {
+ this.repository = repository;
+ }
+ }
+
+
+
+
+
+ Query methods
+
+ Query lookup strategies
+
+ The Elasticsearch module supports all basic query building
+ feature as String,Abstract,Criteria or
+ have
+ it being derived from the
+ method name.
+
-
- Declared queries
-
- Deriving the query from the method name is not always sufficient
- and/or may result in unreadable method names. In this case one
- might make either use of
- @Query
- annotation (see
-
- ).
-
-
-
+
+ Declared queries
+
+ Deriving the query from the method name is not always sufficient
+ and/or may result in unreadable method names. In this case one
+ might make either use of
+ @Query
+ annotation (see
+
+ ).
+
+
+
-
- Query creation
+
+ Query creation
-
- Generally the query creation mechanism for Elasticsearch works as
- described
- in
-
- . Here's a short example
- of what a Elasticsearch query method
- translates into:
-
- Query creation from method names
- public interface BookRepository extends Repository<Book, String> {
- List<Book> findByNameAndPrice(String name, Integer price);
-}
-
- The method name above will be translated into the following
- Elasticsearch json query
-
-
+
+ Generally the query creation mechanism for Elasticsearch works as
+ described
+ in
+
+ . Here's a short example
+ of what a Elasticsearch query method
+ translates into:
+
+ Query creation from method names
+ public interface BookRepository extends Repository<Book, String>
+ {
+ List<Book> findByNameAndPrice(String name, Integer price);
+ }
+
+
+ The method name above will be translated into the following
+ Elasticsearch json query
+
+
{ "bool" :
{ "must" :
[
{ "field" : {"name" : "?"} },
{ "field" : {"price" : "?"} }
- ] } }
-
-
-
- A list of supported keywords for Elasticsearch is shown below.
-
-
-
-
- Using @Query Annotation
-
-
- Declare query at the method using the
- @Query
- annotation.
-
+ ] } }
+
+
+
+
+ A list of supported keywords for Elasticsearch is shown below.
+
+
+
+
+ Using @Query Annotation
+
+
+ Declare query at the method using the
+ @Query
+ annotation.
+
- public interface BookRepository extends ElasticsearchRepository<Book, String> {
- @Query("{"bool" : {"must" : {"field" : {"name" : "?0"}}}}")
- Page<Book> findByName(String name,Pageable pageable);
-}
-
-
+ public interface BookRepository extends ElasticsearchRepository<Book,
+ String> {
+ @Query("{"bool" : {"must" : {"field" : {"name" : "?0"}}}}")
+ Page<Book> findByName(String name,Pageable pageable);
+ }
+
+
+
-
+
\ No newline at end of file
diff --git a/src/docbkx/reference/elasticsearch-misc.xml b/src/docbkx/reference/elasticsearch-misc.xml
index 78aece686..78096a523 100644
--- a/src/docbkx/reference/elasticsearch-misc.xml
+++ b/src/docbkx/reference/elasticsearch-misc.xml
@@ -1,85 +1,88 @@
+ "http://www.oasis-open.org/docbook/xml/4.4/docbookx.dtd">
- Miscellaneous Elasticsearch Operation Support
-
-
- This chapter covers additional support for Elasticsearch operations
- that cannot be directly accessed via the repository
- interface.
- It is
- recommended to add those operations as custom
- implementation as
- described in
-
- .
-
-
-
- Filter Builder
-
- Filter Builder improves query speed.
-
-
-
-private ElasticsearchTemplate elasticsearchTemplate;
-SearchQuery searchQuery = new NativeSearchQueryBuilder()
-.withQuery(matchAllQuery())
-.withFilter(boolFilter().must(termFilter("id", documentId)))
-.build();
-Page<SampleEntity> sampleEntities = elasticsearchTemplate.queryForPage(searchQuery,SampleEntity.class);
-
-
-
-
- Using Scan And Scroll For Big Result Set
-
- Elasticsearch has scan and scroll feature for getting big result set
- in chunks.
- ElasticsearchTemplate
- has scan and scroll methods that can be used as below.
-
-
-
- Using Scan and Scroll
-
-
-SearchQuery searchQuery = new NativeSearchQueryBuilder()
-.withQuery(matchAllQuery())
-.withIndices("test-index")
-.withTypes("test-type")
-.withPageable(new PageRequest(0,1))
-.build();
-String scrollId = elasticsearchTemplate.scan(searchQuery,1000,false);
-List<SampleEntity> sampleEntities = new ArrayList<SampleEntity>();
-boolean hasRecords = true;
-while (hasRecords){
-Page<SampleEntity> page = elasticsearchTemplate.scroll(scrollId, 5000L , new ResultsMapper<SampleEntity>() {
-@Override
-public Page<SampleEntity> mapResults(SearchResponse response) {
-List<SampleEntity> chunk = new ArrayList<SampleEntity>();
-for(SearchHit searchHit : response.getHits()){
- if(response.getHits().getHits().length <= 0) {
- return null;
- }
- SampleEntity user = new SampleEntity();
- user.setId(searchHit.getId());
- user.setMessage((String)searchHit.getSource().get("message"));
- chunk.add(user);
- }
- return new PageImpl<SampleEntity>(chunk);
-}
- });
- if(page != null) {
- sampleEntities.addAll(page.getContent());
- hasRecords = page.hasNextPage();
- }
- else{
- hasRecords = false;
- }
- }
-}
-
-
+ Miscellaneous Elasticsearch Operation Support
+
+
+ This chapter covers additional support for Elasticsearch operations
+ that cannot be directly accessed via the repository
+ interface.
+ It is
+ recommended to add those operations as custom
+ implementation as
+ described in
+
+ .
+
+
+
+ Filter Builder
+
+ Filter Builder improves query speed.
+
+
+
+ private ElasticsearchTemplate elasticsearchTemplate;
+ SearchQuery searchQuery = new NativeSearchQueryBuilder()
+ .withQuery(matchAllQuery())
+ .withFilter(boolFilter().must(termFilter("id", documentId)))
+ .build();
+ Page<SampleEntity> sampleEntities =
+ elasticsearchTemplate.queryForPage(searchQuery,SampleEntity.class);
+
+
+
+
+ Using Scan And Scroll For Big Result Set
+
+ Elasticsearch has scan and scroll feature for getting big result set
+ in chunks.
+ ElasticsearchTemplate
+ has scan and scroll methods that can be used as below.
+
+
+
+ Using Scan and Scroll
+
+
+ SearchQuery searchQuery = new NativeSearchQueryBuilder()
+ .withQuery(matchAllQuery())
+ .withIndices("test-index")
+ .withTypes("test-type")
+ .withPageable(new PageRequest(0,1))
+ .build();
+ String scrollId = elasticsearchTemplate.scan(searchQuery,1000,false);
+ List<SampleEntity> sampleEntities = new ArrayList<SampleEntity>();
+ boolean hasRecords = true;
+ while (hasRecords){
+ Page<SampleEntity> page = elasticsearchTemplate.scroll(scrollId, 5000L , new ResultsMapper<SampleEntity>()
+ {
+ @Override
+ public Page<SampleEntity> mapResults(SearchResponse response) {
+ List<SampleEntity> chunk = new ArrayList<SampleEntity>();
+ for(SearchHit searchHit : response.getHits()){
+ if(response.getHits().getHits().length <= 0) {
+ return null;
+ }
+ SampleEntity user = new SampleEntity();
+ user.setId(searchHit.getId());
+ user.setMessage((String)searchHit.getSource().get("message"));
+ chunk.add(user);
+ }
+ return new PageImpl<SampleEntity>(chunk);
+ }
+ });
+ if(page != null) {
+ sampleEntities.addAll(page.getContent());
+ hasRecords = page.hasNextPage();
+ }
+ else{
+ hasRecords = false;
+ }
+ }
+ }
+
+
+
\ No newline at end of file
diff --git a/src/docbkx/reference/repositories.xml b/src/docbkx/reference/repositories.xml
index b473c297e..25206728d 100644
--- a/src/docbkx/reference/repositories.xml
+++ b/src/docbkx/reference/repositories.xml
@@ -2,89 +2,89 @@
- Repositories
+ Repositories
-
- Introduction
+
+ Introduction
- Implementing a data access layer of an application has been
- cumbersome for quite a while. Too much boilerplate code had to be
- written.
- Domain classes were anemic and not designed in a real object oriented or
- domain driven manner.
-
+ Implementing a data access layer of an application has been
+ cumbersome for quite a while. Too much boilerplate code had to be
+ written.
+ Domain classes were anemic and not designed in a real object oriented or
+ domain driven manner.
+
- Using both of these technologies makes developers life a lot
- easier
- regarding rich domain model's persistence. Nevertheless the amount of
- boilerplate code to implement repositories especially is still quite
- high.
- So the goal of the repository abstraction of Spring Data is to reduce
- the
- effort to implement data access layers for various persistence stores
- significantly.
-
+ Using both of these technologies makes developers life a lot
+ easier
+ regarding rich domain model's persistence. Nevertheless the amount of
+ boilerplate code to implement repositories especially is still quite
+ high.
+ So the goal of the repository abstraction of Spring Data is to reduce
+ the
+ effort to implement data access layers for various persistence stores
+ significantly.
+
- The following chapters will introduce the core concepts and
- interfaces of Spring Data repositories in general for detailled
- information on the specific features of a particular store consult
- the
- later chapters of this document.
-
+ The following chapters will introduce the core concepts and
+ interfaces of Spring Data repositories in general for detailled
+ information on the specific features of a particular store consult
+ the
+ later chapters of this document.
+
-
- As this part of the documentation is pulled in from Spring Data
- Commons we have to decide for a particular module to be used as
- example.
- The configuration and code samples in this chapter are using the JPA
- module. Make sure you adapt e.g. the XML namespace declaration,
- types to
- be extended to the equivalents of the module you're actually
- using.
-
-
-
+
+ As this part of the documentation is pulled in from Spring Data
+ Commons we have to decide for a particular module to be used as
+ example.
+ The configuration and code samples in this chapter are using the JPA
+ module. Make sure you adapt e.g. the XML namespace declaration,
+ types to
+ be extended to the equivalents of the module you're actually
+ using.
+
+
+
-
- Core concepts
+
+ Core concepts
-
- The central interface in Spring Data repository abstraction is
- Repository
- (probably not that much of a
- surprise). It is typeable to the domain class to manage as well as the id
- type of the domain class. This interface mainly acts as marker interface
- to capture the types to deal with and help us when discovering
- interfaces
- that extend this one. Beyond that there's
- CrudRepository
- which provides some
- sophisticated functionality around CRUD for the entity being
- managed.
-
+
+ The central interface in Spring Data repository abstraction is
+ Repository
+ (probably not that much of a
+ surprise). It is typeable to the domain class to manage as well as the id
+ type of the domain class. This interface mainly acts as marker interface
+ to capture the types to deal with and help us when discovering
+ interfaces
+ that extend this one. Beyond that there's
+ CrudRepository
+ which provides some
+ sophisticated functionality around CRUD for the entity being
+ managed.
+
-
-
- CrudRepository
- interface
-
+
+
+ CrudRepository
+ interface
+
-
-
-
+
+
+
-
+
-
+
-
+
-
+
-
-
+
+
- public interface CrudRepository<T, ID extends Serializable>
+ public interface CrudRepository<T, ID extends Serializable>
extends Repository<T, ID> {
T save(T entity);
@@ -100,109 +100,116 @@
boolean exists(ID primaryKey);
// … more functionality omitted.
- }
+ }
+
-
-
- Saves the given entity.
-
+
+
+ Saves the given entity.
+
-
- Returns the entity identified by the given id.
-
+
+ Returns the entity identified by the given id.
+
-
- Returns all entities.
-
+
+ Returns all entities.
+
-
- Returns the number of entities.
-
+
+ Returns the number of entities.
+
-
- Deletes the given entity.
-
+
+ Deletes the given entity.
+
-
- Returns whether an entity with the given id exists.
-
-
-
-
+
+ Returns whether an entity with the given id exists.
+
+
+
+
- Usually we will have persistence technology specific
- sub-interfaces
- to include additional technology specific methods. We will now ship
- implementations for a variety of Spring Data modules that implement
- this
- interface.
-
+ Usually we will have persistence technology specific
+ sub-interfaces
+ to include additional technology specific methods. We will now ship
+ implementations for a variety of Spring Data modules that implement
+ this
+ interface.
+
-
- On top of the
- CrudRepository
- there is
- a
- PagingAndSortingRepository
- abstraction
- that adds additional methods to ease paginated access to entities:
-
+
+ On top of the
+ CrudRepository
+ there is
+ a
+ PagingAndSortingRepository
+ abstraction
+ that adds additional methods to ease paginated access to entities:
+
-
- PagingAndSortingRepository
+
+ PagingAndSortingRepository
- public interface PagingAndSortingRepository<T, ID extends Serializable> extends CrudRepository<T, ID> {
+ public interface PagingAndSortingRepository<T, ID extends Serializable>
+ extends CrudRepository<T, ID> {
Iterable<T> findAll(Sort sort);
Page<T> findAll(Pageable pageable);
- }
-
+ }
+
+
-
- Accessing the second page of
- User
- by a page
- size of 20 you could simply do something like this:
-
+
+ Accessing the second page of
+ User
+ by a page
+ size of 20 you could simply do something like this:
+
- PagingAndSortingRepository<User, Long> repository = // … get access to a bean
- Page<User> users = repository.findAll(new PageRequest(1, 20));
-
+ PagingAndSortingRepository<User, Long> repository = // … get access to a
+ bean
+ Page<User> users = repository.findAll(new PageRequest(1, 20));
+
+
-
- Query methods
+
+ Query methods
- Next to standard CRUD functionality repositories are usually
- queries
- on the underlying datastore. With Spring Data declaring those queries
- becomes a four-step process:
-
+ Next to standard CRUD functionality repositories are usually
+ queries
+ on the underlying datastore. With Spring Data declaring those queries
+ becomes a four-step process:
+
-
-
-
- Declare an interface extending
- Repository
- or one of its sub-interfaces
- and type it to the domain class it shall handle.
-
+
+
+
+ Declare an interface extending
+ Repository
+ or one of its sub-interfaces
+ and type it to the domain class it shall handle.
+
- public interface PersonRepository extends Repository<User, Long> { … }
-
+ public interface PersonRepository extends Repository<User, Long> {
+ … }
+
+
-
- Declare query methods on the interface.
+
+ Declare query methods on the interface.
- List<Person> findByLastname(String lastname);
-
+ List<Person> findByLastname(String lastname);
+
-
- Setup Spring to create proxy instances for those
- interfaces.
-
+
+ Setup Spring to create proxy instances for those
+ interfaces.
+
- <?xml version="1.0" encoding="UTF-8"?>
+ <?xml version="1.0" encoding="UTF-8"?>
<beans:beans xmlns:beans="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://www.springframework.org/schema/data/jpa"
@@ -215,87 +222,89 @@
</beans>
-
-
- Note that we use the JPA namespace here just by example. If
- you're using the repository abstraction for any other store you need
- to change this to the appropriate namespace declaration of your
- store module which should be exchanging
- jpa
- in favor of
- e.g.
- mongodb
- .
-
-
-
+
+
+ Note that we use the JPA namespace here just by example. If
+ you're using the repository abstraction for any other store you need
+ to change this to the appropriate namespace declaration of your
+ store module which should be exchanging
+ jpa
+ in favor of
+ e.g.
+ mongodb
+ .
+
+
+
-
- Get the repository instance injected and use it.
+
+ Get the repository instance injected and use it.
- public class SomeClient {
+ public class SomeClient {
@Autowired
private PersonRepository repository;
public void doSomething() {
List<Person> persons = repository.findByLastname("Matthews");
- }
-
-
+ }
+
+
+
- At this stage we barely scratched the surface of what's possible
- with the repositories but the general approach should be clear. Let's
- go
- through each of these steps and figure out details and various options
- that you have at each stage.
-
+ At this stage we barely scratched the surface of what's possible
+ with the repositories but the general approach should be clear. Let's
+ go
+ through each of these steps and figure out details and various options
+ that you have at each stage.
+
-
- Defining repository interfaces
+
+ Defining repository interfaces
-
- As a very first step you define a domain class specific repository
- interface. It's got to extend
- Repository
- and be typed to the domain class and an ID type. If you want to
- expose
- CRUD methods for that domain type, extend
- CrudRepository
- instead of
- Repository
- .
-
+
+ As a very first step you define a domain class specific repository
+ interface. It's got to extend
+ Repository
+ and be typed to the domain class and an ID type. If you want to
+ expose
+ CRUD methods for that domain type, extend
+ CrudRepository
+ instead of
+ Repository
+ .
+
-
- Fine tuning repository definition
+
+ Fine tuning repository definition
-
- Usually you will have your repository interface extend
- Repository
- ,
- CrudRepository
- or
- PagingAndSortingRepository
- . If you
- don't like extending Spring Data interfaces at all you can also
- annotate your repository interface with
- @RepositoryDefinition
- . Extending
- CrudRepository
- will expose a complete
- set of methods to manipulate your entities. If you would rather be
- selective about the methods being exposed, simply copy the ones you
- want to expose from
- CrudRepository
- into
- your domain repository.
-
+
+ Usually you will have your repository interface extend
+ Repository
+ ,
+ CrudRepository
+ or
+ PagingAndSortingRepository
+ . If you
+ don't like extending Spring Data interfaces at all you can also
+ annotate your repository interface with
+ @RepositoryDefinition
+ . Extending
+ CrudRepository
+ will expose a complete
+ set of methods to manipulate your entities. If you would rather be
+ selective about the methods being exposed, simply copy the ones you
+ want to expose from
+ CrudRepository
+ into
+ your domain repository.
+
-
- Selectively exposing CRUD methods
+
+ Selectively exposing CRUD methods
- interface MyBaseRepository<T, ID extends Serializable> extends Repository<T, ID> {
+ interface MyBaseRepository<T, ID extends Serializable> extends
+ Repository<T, ID> {
T findOne(ID id);
T save(T entity);
}
@@ -303,308 +312,314 @@
interface UserRepository extends MyBaseRepository<User, Long> {
User findByEmailAddress(EmailAddress emailAddress);
- }
-
-
-
- In the first step we define a common base interface for all our
- domain repositories and expose
- findOne(…)
- as
- well as
- save(…)
- .These methods will be routed
- into the base repository implementation of the store of your choice
- because they are matching the method signatures in
- CrudRepository
- . So our
- UserRepository
- will now be able to save
- users, find single ones by id as well as triggering a query to find
- User
- s by their email address.
-
-
-
-
-
- Defining query methods
-
-
- Query lookup strategies
-
- The next thing we have to discuss is the definition of query
- methods. There are two main ways that the repository proxy is able
- to
- come up with the store specific query from the method name. The first
- option is to derive the query from the method name directly, the
- second is using some kind of additionally created query. What
- detailed
- options are available pretty much depends on the actual store,
- however, there's got to be some algorithm that decides what actual
- query is created.
-
-
-
- There are three strategies available for the repository
- infrastructure to resolve the query. The strategy to be used can be
- configured at the namespace through the
- query-lookup-strategy
- attribute. However, It might be the
- case that some of the strategies are not supported for specific
- datastores. Here are your options:
-
-
-
- CREATE
-
-
- This strategy will try to construct a store specific query
- from the query method's name. The general approach is to remove a
- given set of well-known prefixes from the method name and parse
- the
- rest of the method. Read more about query construction in
-
- .
-
-
-
-
- USE_DECLARED_QUERY
-
- This strategy tries to find a declared query which will be
- used for execution first. The query could be defined by an
- annotation somewhere or declared by other means. Please consult
- the
- documentation of the specific store to find out what options are
- available for that store. If the repository infrastructure does not
- find a declared query for the method at bootstrap time it will
- fail.
-
-
-
-
- CREATE_IF_NOT_FOUND (default)
-
-
- This strategy is actually a combination of
- CREATE
- and
- USE_DECLARED_QUERY
- . It will try to lookup a
- declared query first but create a custom method name based query if
- no declared query was found. This is the default lookup strategy and
- thus will be used if you don't configure anything explicitly. It
- allows quick query definition by method names but also custom
- tuning
- of these queries by introducing declared queries as needed.
-
-
-
-
-
- Query creation
-
-
- The query builder mechanism built into Spring Data repository
- infrastructure is useful to build constraining queries over
- entities
- of the repository. We will strip the prefixes
- findBy
- ,
- find
- ,
- readBy
- ,
- read
- ,
- getBy
- as well as
- get
- from the method and
- start parsing the rest of it. At a very basic level you can define
- conditions on entity properties and concatenate them with
- AND
- and
- OR
- .
-
-
-
- Query creation from method names
-
-
- public interface PersonRepository extends Repository<User, Long> {
-
- List<Person> findByEmailAddressAndLastname(EmailAddress emailAddress, String lastname);
- }
-
-
-
-
- The actual result of parsing that method will of course depend
- on the persistence store we create the query for, however, there are
- some general things to notice. The expressions are usually property
- traversals combined with operators that can be concatenated. As you
- can see in the example you can combine property expressions with
- And
- and Or. Beyond that you also get support for various operators like
- Between
- ,
- LessThan
- ,
- GreaterThan
- ,
- Like
- for the
- property expressions. As the operators supported can vary from
- datastore to datastore please consult the according part of the
- reference documentation.
-
-
-
- Property expressions
-
-
- Property expressions can just refer to a direct property of
- the managed entity (as you just saw in the example above). On query
- creation time we already make sure that the parsed property is at
- a
- property of the managed domain class. However, you can also define
- constraints by traversing nested properties. Assume
- Person
- s have
- Address
- es
- with
- ZipCode
- s. In that case a method name
- of
-
-
- List<Person> findByAddressZipCode(ZipCode zipCode);
-
-
- will create the property traversal
- x.address.zipCode
- . The resolution algorithm starts with
- interpreting the entire part (
- AddressZipCode
- ) as
- property and checks the domain class for a property with that name
- (uncapitalized). If it succeeds it just uses that. If not it
- starts
- splitting up the source at the camel case parts from the right side
- into a head and a tail and tries to find the according property,
- e.g.
- AddressZip
- and
- Code
- . If
- we find a property with that head we take the tail and continue
- building the tree down from there. As in our case the first split
- does not match we move the split point to the left
- (
- Address
- ,
- ZipCode
- ).
-
-
-
- Although this should work for most cases, there might be cases
- where the algorithm could select the wrong property. Suppose our
- Person
- class has an
- addressZip
- property as well. Then our algorithm would match in the first
- split
- round already and essentially choose the wrong property and finally
- fail (as the type of
- addressZip
- probably has
- no code property). To resolve this ambiguity you can use
- _
- inside your method name to manually define
- traversal points. So our method name would end up like so:
-
-
- List<Person> findByAddress_ZipCode(ZipCode zipCode);
+ }
-
-
+
-
- Special parameter handling
+
+ In the first step we define a common base interface for all our
+ domain repositories and expose
+ findOne(…)
+ as
+ well as
+ save(…)
+ . These methods will be routed
+ into the base repository implementation of the store of your choice
+ because they are matching the method signatures in
+ CrudRepository
+ . So our
+ UserRepository
+ will now be able to save
+ users, find single ones by id as well as triggering a query to find
+ User
+ s by their email address.
+
+
+
- To hand parameters to your query you simply define method
- parameters as already seen in the examples above. Besides that we
- will
- recognizes certain specific types to apply pagination and sorting to
- your queries dynamically.
-
+
+ Defining query methods
-
- Using Pageable and Sort in query methods
+
+ Query lookup strategies
- Page<User> findByLastname(String lastname, Pageable pageable);
+ The next thing we have to discuss is the definition of query
+ methods. There are two main ways that the repository proxy is able
+ to
+ come up with the store specific query from the method name. The first
+ option is to derive the query from the method name directly, the
+ second is using some kind of additionally created query. What
+ detailed
+ options are available pretty much depends on the actual store,
+ however, there's got to be some algorithm that decides what actual
+ query is created.
+
+
+
+ There are three strategies available for the repository
+ infrastructure to resolve the query. The strategy to be used can be
+ configured at the namespace through the
+ query-lookup-strategy
+ attribute. However, it might be the
+ case that some of the strategies are not supported for specific
+ datastores. Here are your options:
+
+
+
+ CREATE
+
+
+ This strategy will try to construct a store specific query
+ from the query method's name. The general approach is to remove a
+ given set of well-known prefixes from the method name and parse
+ the
+ rest of the method. Read more about query construction in
+
+ .
+
+
+
+
+ USE_DECLARED_QUERY
+
+ This strategy tries to find a declared query which will be
+ used for execution first. The query could be defined by an
+ annotation somewhere or declared by other means. Please consult
+ the
+ documentation of the specific store to find out what options are
+ available for that store. If the repository infrastructure does not
+ find a declared query for the method at bootstrap time it will
+ fail.
+
+
+
+
+ CREATE_IF_NOT_FOUND (default)
+
+
+ This strategy is actually a combination of
+ CREATE
+ and
+ USE_DECLARED_QUERY
+ . It will try to lookup a
+ declared query first but create a custom method name based query if
+ no declared query was found. This is the default lookup strategy and
+ thus will be used if you don't configure anything explicitly. It
+ allows quick query definition by method names but also custom
+ tuning
+ of these queries by introducing declared queries as needed.
+
+
+
+
+
+ Query creation
+
+
+ The query builder mechanism built into Spring Data repository
+ infrastructure is useful to build constraining queries over
+ entities
+ of the repository. We will strip the prefixes
+ findBy
+ ,
+ find
+ ,
+ readBy
+ ,
+ read
+ ,
+ getBy
+ as well as
+ get
+ from the method and
+ start parsing the rest of it. At a very basic level you can define
+ conditions on entity properties and concatenate them with
+ AND
+ and
+ OR
+ .
+
+
+
+ Query creation from method names
+
+
+ public interface PersonRepository extends Repository<User,
+ Long> {
+
+ List<Person> findByEmailAddressAndLastname(EmailAddress emailAddress, String
+ lastname);
+ }
+
+
+
+
+
+ The actual result of parsing that method will of course depend
+ on the persistence store we create the query for, however, there are
+ some general things to notice. The expressions are usually property
+ traversals combined with operators that can be concatenated. As you
+ can see in the example you can combine property expressions with
+ And
+ and Or. Beyond that you also get support for various operators like
+ Between
+ ,
+ LessThan
+ ,
+ GreaterThan
+ ,
+ Like
+ for the
+ property expressions. As the operators supported can vary from
+ datastore to datastore please consult the according part of the
+ reference documentation.
+
+
+
+ Property expressions
+
+
+ Property expressions can just refer to a direct property of
+ the managed entity (as you just saw in the example above). On query
+ creation time we already make sure that the parsed property is at
+ a
+ property of the managed domain class. However, you can also define
+ constraints by traversing nested properties. Assume
+ Person
+ s have
+ Address
+ es
+ with
+ ZipCode
+ s. In that case a method name
+ of
+
+
+ List<Person> findByAddressZipCode(ZipCode zipCode);
+
+
+
+ will create the property traversal
+ x.address.zipCode
+ . The resolution algorithm starts with
+ interpreting the entire part (
+ AddressZipCode
+ ) as
+ property and checks the domain class for a property with that name
+ (uncapitalized). If it succeeds it just uses that. If not it
+ starts
+ splitting up the source at the camel case parts from the right side
+ into a head and a tail and tries to find the according property,
+ e.g.
+ AddressZip
+ and
+ Code
+ . If
+ we find a property with that head we take the tail and continue
+ building the tree down from there. As in our case the first split
+ does not match we move the split point to the left
+ (
+ Address
+ ,
+ ZipCode
+ ).
+
+
+
+ Although this should work for most cases, there might be cases
+ where the algorithm could select the wrong property. Suppose our
+ Person
+ class has an
+ addressZip
+ property as well. Then our algorithm would match in the first
+ split
+ round already and essentially choose the wrong property and finally
+ fail (as the type of
+ addressZip
+ probably has
+ no code property). To resolve this ambiguity you can use
+ _
+ inside your method name to manually define
+ traversal points. So our method name would end up like so:
+
+
+ List<Person> findByAddress_ZipCode(ZipCode zipCode);
+
+
+
+
+
+ Special parameter handling
+
+ To hand parameters to your query you simply define method
+ parameters as already seen in the examples above. Besides that we
+ will
+ recognize certain specific types to apply pagination and sorting to
+ your queries dynamically.
+
+
+
+ Using Pageable and Sort in query methods
+
+ Page<User> findByLastname(String lastname, Pageable pageable);
List<User> findByLastname(String lastname, Sort sort);
- List<User> findByLastname(String lastname, Pageable pageable);
-
+ List<User> findByLastname(String lastname, Pageable pageable);
+
+
-
- The first method allows you to pass a
- Pageable
- instance to the query method to dynamically add paging to your
- statically defined query.
- Sorting
- options are handed via
- the
- Pageable
- instance too. If you only
- need sorting, simply add a
- Sort
- parameter to your method.
- As you also can see, simply returning a
- List
- is possible as well. We will then
- not retrieve the additional metadata required to build the actual
- Page
- instance but rather simply
- restrict the query to lookup only the given range of entities.
-
+
+ The first method allows you to pass a
+ Pageable
+ instance to the query method to dynamically add paging to your
+ statically defined query.
+ Sorting
+ options are handed via
+ the
+ Pageable
+ instance too. If you only
+ need sorting, simply add a
+ Sort
+ parameter to your method.
+ As you also can see, simply returning a
+ List
+ is possible as well. We will then
+ not retrieve the additional metadata required to build the actual
+ Page
+ instance but rather simply
+ restrict the query to lookup only the given range of entities.
+
-
- To find out how many pages you get for a query entirely we
- have to trigger an additional count query. This will be derived
- from
- the query you actually trigger by default.
-
-
-
-
+
+ To find out how many pages you get for a query entirely we
+ have to trigger an additional count query. This will be derived
+ from
+ the query you actually trigger by default.
+
+
+
+
-
- Creating repository instances
+
+ Creating repository instances
- So now the question is how to create instances and bean
- definitions for the repository interfaces defined.
-
+ So now the question is how to create instances and bean
+ definitions for the repository interfaces defined.
+
-
- XML Configuration
+
+ XML Configuration
- The easiest way to do so is by using the Spring namespace that
- is shipped with each Spring Data module that supports the
- repository
- mechanism. Each of those includes a repositories element that allows
- you to simply define a base package that Spring will scan for
- you.
-
+ The easiest way to do so is by using the Spring namespace that
+ is shipped with each Spring Data module that supports the
+ repository
+ mechanism. Each of those includes a repositories element that allows
+ you to simply define a base package that Spring will scan for
+ you.
+
- <?xml version="1.0" encoding="UTF-8"?>
+ <?xml version="1.0" encoding="UTF-8"?>
<beans:beans xmlns:beans="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://www.springframework.org/schema/data/jpa"
@@ -617,109 +632,110 @@
</beans:beans>
-
- In this case we instruct Spring to scan
- com.acme.repositories
- and all its sub packages for
- interfaces extending
- Repository
- or one
- of its sub-interfaces. For each interface found it will register the
- persistence technology specific
- FactoryBean
- to create the according
- proxies that handle invocations of the query methods. Each of these
- beans will be registered under a bean name that is derived from the
- interface name, so an interface of
- UserRepository
- would be registered
- under
- userRepository
- . The
- base-package
- attribute allows the use of wildcards, so that you can have a
- pattern
- of scanned packages.
-
+
+ In this case we instruct Spring to scan
+ com.acme.repositories
+ and all its sub packages for
+ interfaces extending
+ Repository
+ or one
+ of its sub-interfaces. For each interface found it will register the
+ persistence technology specific
+ FactoryBean
+ to create the according
+ proxies that handle invocations of the query methods. Each of these
+ beans will be registered under a bean name that is derived from the
+ interface name, so an interface of
+ UserRepository
+ would be registered
+ under
+ userRepository
+ . The
+ base-package
+ attribute allows the use of wildcards, so that you can have a
+ pattern
+ of scanned packages.
+
-
- Using filters
+
+ Using filters
-
- By default we will pick up every interface extending the
- persistence technology specific
- Repository
- sub-interface located
- underneath the configured base package and create a bean instance
- for it. However, you might want finer grained control over which
- interfaces bean instances get created for. To do this we support
- the
- use of
- <include-filter />
- and
- <exclude-filter />
- elements inside
- <repositories />
- . The semantics are exactly
- equivalent to the elements in Spring's context namespace. For
- details see
- Spring reference documentation
- on these
- elements.
-
+
+ By default we will pick up every interface extending the
+ persistence technology specific
+ Repository
+ sub-interface located
+ underneath the configured base package and create a bean instance
+ for it. However, you might want finer grained control over which
+ interfaces bean instances get created for. To do this we support
+ the
+ use of
+ <include-filter />
+ and
+ <exclude-filter />
+ elements inside
+ <repositories />
+ . The semantics are exactly
+ equivalent to the elements in Spring's context namespace. For
+ details see
+ Spring reference documentation
+
+ on these
+ elements.
+
- E.g. to exclude certain interfaces from instantiation as
- repository, you could use the following configuration:
-
+ E.g. to exclude certain interfaces from instantiation as
+ repository, you could use the following configuration:
+
-
- Using exclude-filter element
+
+ Using exclude-filter element
- <repositories base-package="com.acme.repositories">
+ <repositories base-package="com.acme.repositories">
<context:exclude-filter type="regex" expression=".*SomeRepository" />
</repositories>
-
- This would exclude all interfaces ending in
- SomeRepository
- from being
- instantiated.
-
-
-
-
+
+ This would exclude all interfaces ending in
+ SomeRepository
+ from being
+ instantiated.
+
+
+
+
-
- JavaConfig
+
+ JavaConfig
-
- The repository infrastructure can also be triggered using a
- store-specific
- @Enable${store}Repositories
- annotation
- on a JavaConfig class. For an introduction into Java based
- configuration of the Spring container please have a look at the
- reference documentation.
-
-
- JavaConfig in the Spring reference documentation -
-
-
-
-
+
+ The repository infrastructure can also be triggered using a
+ store-specific
+ @Enable${store}Repositories
+ annotation
+ on a JavaConfig class. For an introduction into Java based
+ configuration of the Spring container please have a look at the
+ reference documentation.
+
+
+ JavaConfig in the Spring reference documentation -
+
+
+
+
- A sample configuration to enable Spring Data repositories
- would
- look something like this.
-
+ A sample configuration to enable Spring Data repositories
+ would
+ look something like this.
+
-
- Sample annotation based repository configuration
+
+ Sample annotation based repository configuration
- @Configuration
+ @Configuration
@EnableJpaRepositories("com.acme.repositories")
class ApplicationConfiguration {
@@ -727,232 +743,240 @@
public EntityManagerFactory entityManagerFactory() {
// …
}
- }
-
+ }
+
+
-
- Note that the sample uses the JPA specific annotation which
- would have to be exchanged dependingon which store module you actually
- use. The same applies to the definition of the
- EntityManagerFactory
- bean. Please
- consult the sections covering the store-specific configuration.
-
-
+
+ Note that the sample uses the JPA specific annotation which
+ would have to be exchanged depending on which store module you actually
+ use. The same applies to the definition of the
+ EntityManagerFactory
+ bean. Please
+ consult the sections covering the store-specific configuration.
+
+
-
- Standalone usage
+
+ Standalone usage
-
- You can also use the repository infrastructure outside of a
- Spring container usage. You will still need to have some of the Spring
- libraries on your classpath but you can generally setup
- repositories
- programmatically as well. The Spring Data modules providing repository
- support ship a persistence technology specific
- RepositoryFactory
- that can be used as
- follows:
-
+
+ You can also use the repository infrastructure outside of a
+ Spring container usage. You will still need to have some of the Spring
+ libraries on your classpath but you can generally setup
+ repositories
+ programmatically as well. The Spring Data modules providing repository
+ support ship a persistence technology specific
+ RepositoryFactory
+ that can be used as
+ follows:
+
-
- Standalone usage of repository factory
+
+ Standalone usage of repository factory
- RepositoryFactorySupport factory = … // Instantiate factory here
- UserRepository repository = factory.getRepository(UserRepository.class);
-
-
-
-
+ RepositoryFactorySupport factory = … // Instantiate factory here
+ UserRepository repository = factory.getRepository(UserRepository.class);
+
+
+
+
+
-
- Custom implementations
+
+ Custom implementations
-
- Adding behaviour to single repositories
+
+ Adding behaviour to single repositories
- Often it is necessary to provide a custom implementation for a
- few
- repository methods. Spring Data repositories easily allow you to provide
- custom repository code and integrate it with generic CRUD
- abstraction
- and query method functionality. To enrich a repository with custom
- functionality you have to define an interface and an implementation
- for
- that functionality first and let the repository interface you provided
- so far extend that custom interface.
-
+ Often it is necessary to provide a custom implementation for a
+ few
+ repository methods. Spring Data repositories easily allow you to provide
+ custom repository code and integrate it with generic CRUD
+ abstraction
+ and query method functionality. To enrich a repository with custom
+ functionality you have to define an interface and an implementation
+ for
+ that functionality first and let the repository interface you provided
+ so far extend that custom interface.
+
-
- Interface for custom repository functionality
+
+ Interface for custom repository functionality
- interface UserRepositoryCustom {
+ interface UserRepositoryCustom {
public void someCustomMethod(User user);
- }
-
-
-
- Implementation of custom repository functionality
-
-
- class UserRepositoryImpl implements UserRepositoryCustom {
-
- public void someCustomMethod(User user) {
- // Your custom implementation
}
- }
- Note that the implementation itself does not depend on
- Spring Data and can be a regular Spring bean. So you can use standard
- dependency injection behaviour to inject references to other beans,
- take part in aspects and so on.
-
-
+
+
-
- Changes to the your basic repository interface
+
+ Implementation of custom repository functionality
-
- public interface UserRepository extends CrudRepository<User, Long>, UserRepositoryCustom {
+
+ class UserRepositoryImpl implements UserRepositoryCustom {
- // Declare query methods here
- }
- Let your standard repository interface extend the custom
- one. This makes CRUD and custom functionality available to
- clients.
-
-
+ public void someCustomMethod(User user) {
+ // Your custom implementation
+ }
+ }
+
+ Note that the implementation itself does not depend on
+ Spring Data and can be a regular Spring bean. So you can use standard
+ dependency injection behaviour to inject references to other beans,
+ take part in aspects and so on.
+
+
-
- Configuration
+
+ Changes to your basic repository interface
-
- If you use namespace configuration the repository infrastructure
- tries to autodetect custom implementations by looking up classes in
- the package we found a repository using the naming conventions
- appending the namespace element's attribute
- repository-impl-postfix
- to the classname. This suffix
- defaults to
- Impl
- .
-
+
+ public interface UserRepository extends CrudRepository<User, Long>,
+ UserRepositoryCustom {
-
- Configuration example
+ // Declare query methods here
+ }
+
+ Let your standard repository interface extend the custom
+ one. This makes CRUD and custom functionality available to
+ clients.
+
+
-
- <repositories base-package="com.acme.repository" />
+
+ Configuration
- <repositories base-package="com.acme.repository" repository-impl-postfix="FooBar" />
-
-
+
+ If you use namespace configuration the repository infrastructure
+ tries to autodetect custom implementations by looking up classes in
+ the package we found a repository using the naming conventions
+ appending the namespace element's attribute
+ repository-impl-postfix
+ to the classname. This suffix
+ defaults to
+ Impl
+ .
+
-
- The first configuration example will try to lookup a class
- com.acme.repository.UserRepositoryImpl
- to act
- as custom repository implementation, where the second example will
- try
- to lookup
- com.acme.repository.UserRepositoryFooBar
- .
-
-
+
+ Configuration example
-
- Manual wiring
+
+ <repositories base-package="com.acme.repository" />
- The approach above works perfectly well if your custom
- implementation uses annotation based configuration and autowiring
- entirely as it will be treated as any other Spring bean. If your
- custom implementation bean needs some special wiring you simply
- declare the bean and name it after the conventions just described.
- We
- will then pick up the custom bean by name rather than creating an
- instance.
-
+ <repositories base-package="com.acme.repository" repository-impl-postfix="FooBar"
+ />
+
+
-
- Manual wiring of custom implementations (I)
+
+ The first configuration example will try to lookup a class
+ com.acme.repository.UserRepositoryImpl
+ to act
+ as custom repository implementation, where the second example will
+ try
+ to lookup
+ com.acme.repository.UserRepositoryFooBar
+ .
+
+
- <repositories base-package="com.acme.repository" />
+
+ Manual wiring
+
+ The approach above works perfectly well if your custom
+ implementation uses annotation based configuration and autowiring
+ entirely as it will be treated as any other Spring bean. If your
+ custom implementation bean needs some special wiring you simply
+ declare the bean and name it after the conventions just described.
+ We
+ will then pick up the custom bean by name rather than creating an
+ instance.
+
+
+
+ Manual wiring of custom implementations (I)
+
+ <repositories base-package="com.acme.repository" />
<beans:bean id="userRepositoryImpl" class="…">
<!-- further configuration -->
</beans:bean>
-
-
-
+
+
+
-
- Adding custom behaviour to all repositories
+
+ Adding custom behaviour to all repositories
- In other cases you might want to add a single method to all of
- your repository interfaces. So the approach just shown is not
- feasible.
- The first step to achieve this is adding and intermediate interface to
- declare the shared behaviour
-
+ In other cases you might want to add a single method to all of
+ your repository interfaces. So the approach just shown is not
+ feasible.
+ The first step to achieve this is adding an intermediate interface to
+ declare the shared behaviour
+
-
- An interface declaring custom shared behaviour
+
+ An interface declaring custom shared behaviour
-
-
- public interface MyRepository<T, ID extends Serializable>
- extends JpaRepository<T, ID> {
+
+
+ public interface MyRepository<T, ID extends Serializable>
+ extends JpaRepository<T, ID> {
- void sharedCustomMethod(ID id);
- }
-
-
+ void sharedCustomMethod(ID id);
+ }
+
+
+
-
- Now your individual repository interfaces will extend this
- intermediate interface instead of the
- Repository
- interface to include the
- functionality declared. The second step is to create an implementation
- of this interface that extends the persistence technology specific
- repository base class which will then act as a custom base class for
- the
- repository proxies.
-
+
+ Now your individual repository interfaces will extend this
+ intermediate interface instead of the
+ Repository
+ interface to include the
+ functionality declared. The second step is to create an implementation
+ of this interface that extends the persistence technology specific
+ repository base class which will then act as a custom base class for
+ the
+ repository proxies.
+
-
-
- The default behaviour of the Spring
- <repositories
- />
- namespace is to provide an implementation for all
- interfaces that fall under the
- base-package
- . This means
- that if left in it's current state, an implementation instance of
- MyRepository
- will be created by Spring.
- This is of course not desired as it is just supposed to act as an
- intermediary between
- Repository
- and the
- actual repository interfaces you want to define for each entity. To
- exclude an interface extending
- Repository
- from being instantiated as a
- repository instance it can either be annotate it with
- @NoRepositoryBean
- or moved out side of
- the configured
- base-package
- .
-
-
+
+
+ The default behaviour of the Spring
+ <repositories
+ />
+ namespace is to provide an implementation for all
+ interfaces that fall under the
+ base-package
+ . This means
+ that if left in its current state, an implementation instance of
+ MyRepository
+ will be created by Spring.
+ This is of course not desired as it is just supposed to act as an
+ intermediary between
+ Repository
+ and the
+ actual repository interfaces you want to define for each entity. To
+ exclude an interface extending
+ Repository
+ from being instantiated as a
+ repository instance it can either be annotated with
+ @NoRepositoryBean
+ or moved outside of
+ the configured
+ base-package
+ .
+
+
-
- Custom repository base class
+
+ Custom repository base class
-
+
public class MyRepositoryImpl<T, ID extends Serializable>
extends SimpleJpaRepository<T, ID> implements MyRepository<T, ID> {
@@ -969,32 +993,34 @@
public void sharedCustomMethod(ID id) {
// implementation goes here
}
- }
-
+ }
+
+
-
- The last step is to create a custom repository factory to replace
- the default
- RepositoryFactoryBean
- that will in
- turn produce a custom
- RepositoryFactory
- . The new
- repository factory will then provide your
- MyRepositoryImpl
- as the implementation of any
- interfaces that extend the
- Repository
- interface, replacing the
- SimpleJpaRepository
- implementation you just extended.
-
+
+ The last step is to create a custom repository factory to replace
+ the default
+ RepositoryFactoryBean
+ that will in
+ turn produce a custom
+ RepositoryFactory
+ . The new
+ repository factory will then provide your
+ MyRepositoryImpl
+ as the implementation of any
+ interfaces that extend the
+ Repository
+ interface, replacing the
+ SimpleJpaRepository
+ implementation you just extended.
+
-
- Custom repository factory bean
+
+ Custom repository factory bean
-
- public class MyRepositoryFactoryBean<R extends JpaRepository<T, I>, T, I extends Serializable>
+
+ public class MyRepositoryFactoryBean<R extends JpaRepository<T, I>, T, I extends
+ Serializable>
extends JpaRepositoryFactoryBean<R, T, I> {
protected RepositoryFactorySupport createRepositoryFactory(EntityManager entityManager) {
@@ -1002,7 +1028,8 @@
return new MyRepositoryFactory(entityManager);
}
- private static class MyRepositoryFactory<T, I extends Serializable> extends JpaRepositoryFactory {
+ private static class MyRepositoryFactory<T, I extends Serializable> extends
+ JpaRepositoryFactory {
private EntityManager entityManager;
@@ -1024,49 +1051,50 @@
return MyRepository.class;
}
}
- }
-
+ }
+
+
-
- Finally you can either declare beans of the custom factory
- directly or use the
- factory-class
- attribute of the Spring
- namespace to tell the repository infrastructure to use your custom
- factory implementation.
-
+
+ Finally you can either declare beans of the custom factory
+ directly or use the
+ factory-class
+ attribute of the Spring
+ namespace to tell the repository infrastructure to use your custom
+ factory implementation.
+
-
- Using the custom factory with the namespace
+
+ Using the custom factory with the namespace
- <repositories base-package="com.acme.repository"
+ <repositories base-package="com.acme.repository"
factory-class="com.acme.MyRepositoryFactoryBean" />
-
-
-
+
+
+
-
- Extensions
+
+ Extensions
- This chapter documents a set of Spring Data extensions that
- enable
- Spring Data usage in a variety of contexts. Currently most of the
- integration is targeted towards Spring MVC.
-
+ This chapter documents a set of Spring Data extensions that
+ enable
+ Spring Data usage in a variety of contexts. Currently most of the
+ integration is targeted towards Spring MVC.
+
-
- Domain class web binding for Spring MVC
+
+ Domain class web binding for Spring MVC
- Given you are developing a Spring MVC web applications you
- typically have to resolve domain class ids from URLs. By default
- it's
- your task to transform that request parameter or URL part into the
- domain class to hand it layers below then or execute business logic
- on
- the entities directly. This should look something like this:
-
+ Given you are developing a Spring MVC web applications you
+ typically have to resolve domain class ids from URLs. By default
+ it's
+ your task to transform that request parameter or URL part into the
+ domain class to hand it layers below then or execute business logic
+ on
+ the entities directly. This should look something like this:
+
- @Controller
+ @Controller
@RequestMapping("/users")
public class UserController {
@@ -1085,41 +1113,43 @@
// Populate model
return "user";
}
- }
+ }
+
-
- First you pretty much have to declare a repository dependency for
- each controller to lookup the entity managed by the controller or
- repository respectively. Beyond that looking up the entity is
- boilerplate as well as it's always a
- findOne(…)
- call. Fortunately Spring provides means to register custom
- converting
- components that allow conversion between a
- String
- value to an arbitrary type.
-
+
+ First you pretty much have to declare a repository dependency for
+ each controller to lookup the entity managed by the controller or
+ repository respectively. Beyond that looking up the entity is
+ boilerplate as well as it's always a
+ findOne(…)
+ call. Fortunately Spring provides means to register custom
+ converting
+ components that allow conversion between a
+ String
+ value to an arbitrary type.
+
-
- PropertyEditors
+
+ PropertyEditors
-
- For versions up to Spring 3.0 simple Java
- PropertyEditor
- s had to be used. Thus,
- we offer a
- DomainClassPropertyEditorRegistrar
- ,
- that will look up all Spring Data repositories registered in the
- ApplicationContext
- and register a
- custom
- PropertyEditor
- for the managed
- domain class
-
+
+ For versions up to Spring 3.0 simple Java
+ PropertyEditor
+ s had to be used. Thus,
+ we offer a
+ DomainClassPropertyEditorRegistrar
+ ,
+ that will look up all Spring Data repositories registered in the
+ ApplicationContext
+ and register a
+ custom
+ PropertyEditor
+ for the managed
+ domain class
+
- <bean class="….web.servlet.mvc.annotation.AnnotationMethodHandlerAdapter">
+ <bean
+ class="….web.servlet.mvc.annotation.AnnotationMethodHandlerAdapter">
<property name="webBindingInitializer">
<bean class="….web.bind.support.ConfigurableWebBindingInitializer">
<property name="propertyEditorRegistrars">
@@ -1129,12 +1159,12 @@
</property>
</bean>
- If you have configured Spring MVC like this you can turn your
- controller into the following that reduces a lot of the clutter and
- boilerplate.
-
+ If you have configured Spring MVC like this you can turn your
+ controller into the following that reduces a lot of the clutter and
+ boilerplate.
+
- @Controller
+ @Controller
@RequestMapping("/users")
public class UserController {
@@ -1145,33 +1175,34 @@
// Populate model
return "userForm";
}
- }
-
+ }
+
+
-
- ConversionService
+
+ ConversionService
-
- As of Spring 3.0 the
- PropertyEditor
- support is superseeded
- by a new conversion infrstructure that leaves all the drawbacks of
- PropertyEditor
- s behind and uses a
- stateless X to Y conversion approach. We now ship with a
- DomainClassConverter
- that pretty much mimics
- the behaviour of
- DomainClassPropertyEditorRegistrar
- . To register
- the converter you have to declare
- ConversionServiceFactoryBean
- , register the
- converter and tell the Spring MVC namespace to use the configured
- conversion service:
-
+
+ As of Spring 3.0 the
+ PropertyEditor
+ support is superseeded
+ by a new conversion infrstructure that leaves all the drawbacks of
+ PropertyEditor
+ s behind and uses a
+ stateless X to Y conversion approach. We now ship with a
+ DomainClassConverter
+ that pretty much mimics
+ the behaviour of
+ DomainClassPropertyEditorRegistrar
+ . To register
+ the converter you have to declare
+ ConversionServiceFactoryBean
+ , register the
+ converter and tell the Spring MVC namespace to use the configured
+ conversion service:
+
- <mvc:annotation-driven conversion-service="conversionService" />
+ <mvc:annotation-driven conversion-service="conversionService" />
<bean id="conversionService" class="….context.support.ConversionServiceFactoryBean">
<property name="converters">
@@ -1182,13 +1213,13 @@
</list>
</property>
</bean>
-
-
+
+
-
- Web pagination
+
+ Web pagination
- @Controller
+ @Controller
@RequestMapping("/users")
public class UserController {
@@ -1202,24 +1233,25 @@
model.addAttribute("users", userService.getUsers(pageable));
return "users";
}
- }
+ }
+
-
- As you can see the naive approach requires the method to contain
- an
- HttpServletRequest
- parameter that has
- to be parsed manually. We even omitted an appropriate failure handling
- which would make the code even more verbose. The bottom line is that
- the
- controller actually shouldn't have to handle the functionality of
- extracting pagination information from the request. So we include a
- PageableArgumentResolver
- that will do the work
- for you.
-
+
+ As you can see the naive approach requires the method to contain
+ an
+ HttpServletRequest
+ parameter that has
+ to be parsed manually. We even omitted an appropriate failure handling
+ which would make the code even more verbose. The bottom line is that
+ the
+ controller actually shouldn't have to handle the functionality of
+ extracting pagination information from the request. So we include a
+ PageableArgumentResolver
+ that will do the work
+ for you.
+
- <bean class="….web.servlet.mvc.annotation.AnnotationMethodHandlerAdapter">
+ <bean class="….web.servlet.mvc.annotation.AnnotationMethodHandlerAdapter">
<property name="customArgumentResolvers">
<list>
<bean class="org.springframework.data.web.PageableArgumentResolver" />
@@ -1227,11 +1259,11 @@
</property>
</bean>
- This configuration allows you to simplify controllers down to
- something like this:
-
+ This configuration allows you to simplify controllers down to
+ something like this:
+
- @Controller
+ @Controller
@RequestMapping("/users")
public class UserController {
@@ -1241,168 +1273,170 @@
model.addAttribute("users", userDao.readAll(pageable));
return "users";
}
- }
+ }
+
-
- The
- PageableArgumentResolver
- will
- automatically resolve request parameters to build a
- PageRequest
- instance. By default it will expect
- the following structure for the request parameters:
-
+
+ The
+ PageableArgumentResolver
+ will
+ automatically resolve request parameters to build a
+ PageRequest
+ instance. By default it will expect
+ the following structure for the request parameters:
+
-
-
- Request parameters evaluated by
- PageableArgumentResolver
-
+
+
+ Request parameters evaluated by
+ PageableArgumentResolver
+
-
-
+
+
-
+
-
-
-
- page
-
+
+
+
+ page
+
- The page you want to retrieve
-
+ The page you want to retrieve
+
-
-
- page.size
-
+
+
+ page.size
+
- The size of the page you want to retrieve
-
+ The size of the page you want to retrieve
+
-
-
- page.sort
-
+
+
+ page.sort
+
- The property that should be sorted by
-
+ The property that should be sorted by
+
-
-
- page.sort.dir
-
+
+
+ page.sort.dir
+
- The direction that should be used for sorting
-
-
-
-
+ The direction that should be used for sorting
+
+
+
+
-
- In case you need multiple
- Pageable
- s
- to be resolved from the request (for multiple tables e.g.) you can use
- Spring's
- @Qualifier
- annotation to
- distinguish one from another. The request parameters then have to be
- prefixed with
- ${qualifier}_
- . So a method signature like
- this:
-
+
+ In case you need multiple
+ Pageable
+ s
+ to be resolved from the request (for multiple tables e.g.) you can use
+ Spring's
+ @Qualifier
+ annotation to
+ distinguish one from another. The request parameters then have to be
+ prefixed with
+ ${qualifier}_
+ . So a method signature like
+ this:
+
- public String showUsers(Model model,
+ public String showUsers(Model model,
@Qualifier("foo") Pageable first,
@Qualifier("bar") Pageable second) { … }
-
- you'd have to populate
- foo_page
- and
- bar_page
- and the according subproperties.
-
+
+ you'd have to populate
+ foo_page
+ and
+ bar_page
+ and the according subproperties.
+
-
- Defaulting
+
+ Defaulting
-
- The
- PageableArgumentResolver
- will use a
- PageRequest
- with the first page and a page size
- of 10 by default and will use that in case it can't resolve a
- PageRequest
- from the request (because of
- missing parameters e.g.). You can configure a global default on the
- bean declaration directly. In case you might need controller method
- specific defaults for the
- Pageable
- simply annotate the method parameter with
- @PageableDefaults
- and specify page and
- page size as annotation attributes:
-
+
+ The
+ PageableArgumentResolver
+ will use a
+ PageRequest
+ with the first page and a page size
+ of 10 by default and will use that in case it can't resolve a
+ PageRequest
+ from the request (because of
+ missing parameters e.g.). You can configure a global default on the
+ bean declaration directly. In case you might need controller method
+ specific defaults for the
+ Pageable
+ simply annotate the method parameter with
+ @PageableDefaults
+ and specify page and
+ page size as annotation attributes:
+
- public String showUsers(Model model,
+ public String showUsers(Model model,
@PageableDefaults(pageNumber = 0, value = 30) Pageable pageable) { … }
-
-
+
+
-
- Repository populators
+
+ Repository populators
- If you have been working with the JDBC module of Spring you're
- probably familiar with the support to populate a DataSource using
- SQL
- scripts. A similar abstraction is available on the repositories level
- although we don't use SQL as data definition language as we need to
- be
- store independent of course. Thus the populators support XML (through
- Spring's OXM abstraction) and JSON (through Jackson) to define data
- for
- the repositories to be populated with.
-
+ If you have been working with the JDBC module of Spring you're
+ probably familiar with the support to populate a DataSource using
+ SQL
+ scripts. A similar abstraction is available on the repositories level
+ although we don't use SQL as data definition language as we need to
+ be
+ store independent of course. Thus the populators support XML (through
+ Spring's OXM abstraction) and JSON (through Jackson) to define data
+ for
+ the repositories to be populated with.
+
-
- Assume you have a file
- data.json
- with the
- following content:
-
+
+ Assume you have a file
+ data.json
+ with the
+ following content:
+
-
- Data defined in JSON
+
+ Data defined in JSON
- [ { "_class" : "com.acme.Person",
+ [ { "_class" : "com.acme.Person",
"firstname" : "Dave",
"lastname" : "Matthews" },
{ "_class" : "com.acme.Person",
"firstname" : "Carter",
- "lastname" : "Beauford" } ]
-
+ "lastname" : "Beauford" } ]
+
+
-
- You can easily populate you repositories by using the populator
- elements of the repository namespace provided in Spring Data
- Commons. To
- get the just shown data be populated to your
- PersonRepository
- all you need to do is
- the following:
-
+
+ You can easily populate you repositories by using the populator
+ elements of the repository namespace provided in Spring Data
+ Commons. To
+ get the just shown data be populated to your
+ PersonRepository
+ all you need to do is
+ the following:
+
-
- Declaring a Jackson repository populator
+
+ Declaring a Jackson repository populator
- <?xml version="1.0" encoding="UTF-8"?>
+ <?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:repository="http://www.springframework.org/schema/data/repository"
@@ -1414,33 +1448,33 @@
<repository:jackson-populator location="classpath:data.json" />
</beans>
-
+
-
- This declaration causes the data.json file being read,
- deserialized by a Jackson
- ObjectMapper
- . The type
- the JSON object will be unmarshalled to will be determined by
- inspecting
- the
- _class
- attribute of the JSON document. We will
- eventually select the appropriate repository being able to handle the
- object just deserialized.
-
+
+ This declaration causes the data.json file being read,
+ deserialized by a Jackson
+ ObjectMapper
+ . The type
+ the JSON object will be unmarshalled to will be determined by
+ inspecting
+ the
+ _class
+ attribute of the JSON document. We will
+ eventually select the appropriate repository being able to handle the
+ object just deserialized.
+
- To rather use XML to define the repositories shall be populated
- with you can use the unmarshaller-populator you hand one of the
- marshaller options Spring OXM provides you with.
-
+ To rather use XML to define the repositories shall be populated
+ with you can use the unmarshaller-populator you hand one of the
+ marshaller options Spring OXM provides you with.
+
-
- Declaring an unmarshalling repository populator (using
- JAXB)
-
+
+ Declaring an unmarshalling repository populator (using
+ JAXB)
+
- <?xml version="1.0" encoding="UTF-8"?>
+ <?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:repository="http://www.springframework.org/schema/data/repository"
@@ -1452,12 +1486,13 @@
http://www.springframework.org/schema/oxm
http://www.springframework.org/schema/oxm/spring-oxm.xsd">
- <repository:unmarshaller-populator location="classpath:data.json" unmarshaller-ref="unmarshaller" />
+ <repository:unmarshaller-populator location="classpath:data.json" unmarshaller-ref="unmarshaller"
+ />
<oxm:jaxb2-marshaller contextPath="com.acme" />
</beans>
-
-
-
+
+
+
\ No newline at end of file
diff --git a/src/main/java/org/springframework/data/elasticsearch/ElasticsearchException.java b/src/main/java/org/springframework/data/elasticsearch/ElasticsearchException.java
index ba64746c4..b3b0ddb20 100644
--- a/src/main/java/org/springframework/data/elasticsearch/ElasticsearchException.java
+++ b/src/main/java/org/springframework/data/elasticsearch/ElasticsearchException.java
@@ -19,7 +19,7 @@ import java.util.Map;
/**
* ElasticsearchException
- *
+ *
* @author Rizwan Idrees
* @author Mohsin Husen
*/
diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/DateFormat.java b/src/main/java/org/springframework/data/elasticsearch/annotations/DateFormat.java
index 55ed5d9ed..056dc1482 100644
--- a/src/main/java/org/springframework/data/elasticsearch/annotations/DateFormat.java
+++ b/src/main/java/org/springframework/data/elasticsearch/annotations/DateFormat.java
@@ -17,16 +17,15 @@ package org.springframework.data.elasticsearch.annotations;
/**
* @author Jakub Vavrik
- *
- * Values based on reference doc - http://www.elasticsearch.org/guide/reference/mapping/date-format/
+ * Values based on reference doc - http://www.elasticsearch.org/guide/reference/mapping/date-format/
*/
public enum DateFormat {
- none, custom, basic_date, basic_date_time, basic_date_time_no_millis, basic_ordinal_date, basic_ordinal_date_time,
- basic_ordinal_date_time_no_millis, basic_time, basic_time_no_millis, basic_t_time, basic_t_time_no_millis,
- basic_week_date, basic_week_date_time, basic_week_date_time_no_millis, date, date_hour, date_hour_minute,
- date_hour_minute_second, date_hour_minute_second_fraction, date_hour_minute_second_millis, date_optional_time,
- date_time, date_time_no_millis, hour, hour_minute, hour_minute_second, hour_minute_second_fraction,
- hour_minute_second_millis, ordinal_date, ordinal_date_time, ordinal_date_time_no_millis, time, time_no_millis,
- t_time, t_time_no_millis, week_date, week_date_time, weekDateTimeNoMillis, week_year, weekyearWeek,
- weekyearWeekDay, year, year_month, year_month_day
+ none, custom, basic_date, basic_date_time, basic_date_time_no_millis, basic_ordinal_date, basic_ordinal_date_time,
+ basic_ordinal_date_time_no_millis, basic_time, basic_time_no_millis, basic_t_time, basic_t_time_no_millis,
+ basic_week_date, basic_week_date_time, basic_week_date_time_no_millis, date, date_hour, date_hour_minute,
+ date_hour_minute_second, date_hour_minute_second_fraction, date_hour_minute_second_millis, date_optional_time,
+ date_time, date_time_no_millis, hour, hour_minute, hour_minute_second, hour_minute_second_fraction,
+ hour_minute_second_millis, ordinal_date, ordinal_date_time, ordinal_date_time_no_millis, time, time_no_millis,
+ t_time, t_time_no_millis, week_date, week_date_time, weekDateTimeNoMillis, week_year, weekyearWeek,
+ weekyearWeekDay, year, year_month, year_month_day
}
diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/Document.java b/src/main/java/org/springframework/data/elasticsearch/annotations/Document.java
index 4299759f4..ef32e7e38 100644
--- a/src/main/java/org/springframework/data/elasticsearch/annotations/Document.java
+++ b/src/main/java/org/springframework/data/elasticsearch/annotations/Document.java
@@ -15,13 +15,13 @@
*/
package org.springframework.data.elasticsearch.annotations;
-import org.springframework.data.annotation.Persistent;
-
import java.lang.annotation.*;
+import org.springframework.data.annotation.Persistent;
+
/**
* Document
- *
+ *
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@@ -29,7 +29,7 @@ import java.lang.annotation.*;
@Persistent
@Inherited
@Retention(RetentionPolicy.RUNTIME)
-@Target({ ElementType.TYPE })
+@Target({ElementType.TYPE})
public @interface Document {
String indexName();
diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/Field.java b/src/main/java/org/springframework/data/elasticsearch/annotations/Field.java
index 727d213a9..708fa8b0d 100644
--- a/src/main/java/org/springframework/data/elasticsearch/annotations/Field.java
+++ b/src/main/java/org/springframework/data/elasticsearch/annotations/Field.java
@@ -29,20 +29,19 @@ import java.lang.annotation.*;
@Documented
public @interface Field {
- FieldType type() default FieldType.Auto;
+ FieldType type() default FieldType.Auto;
- FieldIndex index() default FieldIndex.analyzed;
+ FieldIndex index() default FieldIndex.analyzed;
- DateFormat format() default DateFormat.none;
+ DateFormat format() default DateFormat.none;
- String pattern() default "";
+ String pattern() default "";
- boolean store() default false;
+ boolean store() default false;
- String searchAnalyzer() default "";
+ String searchAnalyzer() default "";
- String indexAnalyzer() default "";
-
- String [] ignoreFields() default {};
+ String indexAnalyzer() default "";
+ String[] ignoreFields() default {};
}
diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/FieldIndex.java b/src/main/java/org/springframework/data/elasticsearch/annotations/FieldIndex.java
index deeff821d..9eee6423b 100644
--- a/src/main/java/org/springframework/data/elasticsearch/annotations/FieldIndex.java
+++ b/src/main/java/org/springframework/data/elasticsearch/annotations/FieldIndex.java
@@ -20,5 +20,5 @@ package org.springframework.data.elasticsearch.annotations;
* @author Mohsin Husen
*/
public enum FieldIndex {
- not_analyzed, analyzed
+ not_analyzed, analyzed
}
diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/FieldType.java b/src/main/java/org/springframework/data/elasticsearch/annotations/FieldType.java
index a8a6860d8..38062bfa6 100644
--- a/src/main/java/org/springframework/data/elasticsearch/annotations/FieldType.java
+++ b/src/main/java/org/springframework/data/elasticsearch/annotations/FieldType.java
@@ -21,5 +21,5 @@ package org.springframework.data.elasticsearch.annotations;
* @author Artur Konczak
*/
public enum FieldType {
- String, Integer, Long, Date, Float, Double, Boolean, Object, Auto, Nested
+ String, Integer, Long, Date, Float, Double, Boolean, Object, Auto, Nested
}
diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/MultiField.java b/src/main/java/org/springframework/data/elasticsearch/annotations/MultiField.java
index 54e865d73..41c781e80 100644
--- a/src/main/java/org/springframework/data/elasticsearch/annotations/MultiField.java
+++ b/src/main/java/org/springframework/data/elasticsearch/annotations/MultiField.java
@@ -28,7 +28,7 @@ import java.lang.annotation.*;
@Documented
public @interface MultiField {
- public Field mainField();
+ public Field mainField();
- public NestedField[] otherFields() default {};
+ public NestedField[] otherFields() default {};
}
diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/NestedField.java b/src/main/java/org/springframework/data/elasticsearch/annotations/NestedField.java
index 45a64f177..7a7f001f8 100644
--- a/src/main/java/org/springframework/data/elasticsearch/annotations/NestedField.java
+++ b/src/main/java/org/springframework/data/elasticsearch/annotations/NestedField.java
@@ -27,15 +27,15 @@ import java.lang.annotation.Target;
@Target(ElementType.FIELD)
public @interface NestedField {
- String dotSuffix();
+ String dotSuffix();
- FieldType type();
+ FieldType type();
- FieldIndex index() default FieldIndex.analyzed;
+ FieldIndex index() default FieldIndex.analyzed;
- boolean store() default false;
+ boolean store() default false;
- String searchAnalyzer() default "";
+ String searchAnalyzer() default "";
- String indexAnalyzer() default "";
+ String indexAnalyzer() default "";
}
diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/Parent.java b/src/main/java/org/springframework/data/elasticsearch/annotations/Parent.java
index a0208ad7b..940e6f04f 100644
--- a/src/main/java/org/springframework/data/elasticsearch/annotations/Parent.java
+++ b/src/main/java/org/springframework/data/elasticsearch/annotations/Parent.java
@@ -21,7 +21,7 @@ import org.springframework.data.annotation.Persistent;
/**
* Parent
- *
+ *
* @author Philipp Jardas
*/
@@ -30,5 +30,6 @@ import org.springframework.data.annotation.Persistent;
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface Parent {
+
String type();
}
diff --git a/src/main/java/org/springframework/data/elasticsearch/annotations/Query.java b/src/main/java/org/springframework/data/elasticsearch/annotations/Query.java
index cc78366f5..3c91cdf29 100644
--- a/src/main/java/org/springframework/data/elasticsearch/annotations/Query.java
+++ b/src/main/java/org/springframework/data/elasticsearch/annotations/Query.java
@@ -19,7 +19,7 @@ import java.lang.annotation.*;
/**
* Query
- *
+ *
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@@ -31,16 +31,15 @@ public @interface Query {
/**
* Elasticsearch query to be used when executing query. May contain placeholders eg. ?0
- *
+ *
* @return
*/
String value() default "";
/**
* Named Query Named looked up by repository.
- *
+ *
* @return
*/
String name() default "";
-
}
diff --git a/src/main/java/org/springframework/data/elasticsearch/client/NodeClientFactoryBean.java b/src/main/java/org/springframework/data/elasticsearch/client/NodeClientFactoryBean.java
index c90f952c0..8caa15841 100644
--- a/src/main/java/org/springframework/data/elasticsearch/client/NodeClientFactoryBean.java
+++ b/src/main/java/org/springframework/data/elasticsearch/client/NodeClientFactoryBean.java
@@ -15,6 +15,8 @@
*/
package org.springframework.data.elasticsearch.client;
+import static org.elasticsearch.node.NodeBuilder.*;
+
import org.elasticsearch.client.Client;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.settings.ImmutableSettings;
@@ -24,11 +26,9 @@ import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
-import static org.elasticsearch.node.NodeBuilder.nodeBuilder;
-
/**
* NodeClientFactoryBean
- *
+ *
* @author Rizwan Idrees
* @author Mohsin Husen
*/
diff --git a/src/main/java/org/springframework/data/elasticsearch/client/TransportClientFactoryBean.java b/src/main/java/org/springframework/data/elasticsearch/client/TransportClientFactoryBean.java
index bd3294f26..6d03216ee 100644
--- a/src/main/java/org/springframework/data/elasticsearch/client/TransportClientFactoryBean.java
+++ b/src/main/java/org/springframework/data/elasticsearch/client/TransportClientFactoryBean.java
@@ -15,6 +15,11 @@
*/
package org.springframework.data.elasticsearch.client;
+import static org.apache.commons.lang.StringUtils.*;
+import static org.elasticsearch.common.settings.ImmutableSettings.*;
+
+import java.util.Properties;
+
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
@@ -25,11 +30,6 @@ import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.util.Assert;
-import java.util.Properties;
-
-import static org.apache.commons.lang.StringUtils.*;
-import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
-
/**
* TransportClientFactoryBean
*
@@ -40,114 +40,114 @@ import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilde
public class TransportClientFactoryBean implements FactoryBean, InitializingBean, DisposableBean {
- private static final Logger logger = LoggerFactory.getLogger(TransportClientFactoryBean.class);
- private String clusterNodes;
- private String clusterName;
- private Boolean clientTransportSniff;
- private Boolean clientIgnoreClusterName = Boolean.FALSE;
- private String clientPingTimeout = "5s";
- private String clientNodesSamplerInterval = "5s";
- private TransportClient client;
- private Properties properties;
- static final String COLON = ":";
- static final String COMMA = ",";
+ private static final Logger logger = LoggerFactory.getLogger(TransportClientFactoryBean.class);
+ private String clusterNodes;
+ private String clusterName;
+ private Boolean clientTransportSniff;
+ private Boolean clientIgnoreClusterName = Boolean.FALSE;
+ private String clientPingTimeout = "5s";
+ private String clientNodesSamplerInterval = "5s";
+ private TransportClient client;
+ private Properties properties;
+ static final String COLON = ":";
+ static final String COMMA = ",";
- @Override
- public void destroy() throws Exception {
- try {
- logger.info("Closing elasticSearch client");
- if (client != null) {
- client.close();
- }
- } catch (final Exception e) {
- logger.error("Error closing ElasticSearch client: ", e);
- }
- }
+ @Override
+ public void destroy() throws Exception {
+ try {
+ logger.info("Closing elasticSearch client");
+ if (client != null) {
+ client.close();
+ }
+ } catch (final Exception e) {
+ logger.error("Error closing ElasticSearch client: ", e);
+ }
+ }
- @Override
- public TransportClient getObject() throws Exception {
- return client;
- }
+ @Override
+ public TransportClient getObject() throws Exception {
+ return client;
+ }
- @Override
- public Class getObjectType() {
- return TransportClient.class;
- }
+ @Override
+ public Class getObjectType() {
+ return TransportClient.class;
+ }
- @Override
- public boolean isSingleton() {
- return false;
- }
+ @Override
+ public boolean isSingleton() {
+ return false;
+ }
- @Override
- public void afterPropertiesSet() throws Exception {
- buildClient();
- }
+ @Override
+ public void afterPropertiesSet() throws Exception {
+ buildClient();
+ }
- protected void buildClient() throws Exception {
- client = new TransportClient(settings());
- Assert.hasText(clusterNodes, "[Assertion failed] clusterNodes settings missing.");
- for (String clusterNode : split(clusterNodes,COMMA)) {
- String hostName = substringBefore(clusterNode, COLON);
- String port = substringAfter(clusterNode, COLON);
- Assert.hasText(hostName, "[Assertion failed] missing host name in 'clusterNodes'");
- Assert.hasText(port, "[Assertion failed] missing port in 'clusterNodes'");
- logger.info("adding transport node : " + clusterNode);
- client.addTransportAddress(new InetSocketTransportAddress(hostName, Integer.valueOf(port)));
- }
- client.connectedNodes();
- }
+ protected void buildClient() throws Exception {
+ client = new TransportClient(settings());
+ Assert.hasText(clusterNodes, "[Assertion failed] clusterNodes settings missing.");
+ for (String clusterNode : split(clusterNodes, COMMA)) {
+ String hostName = substringBefore(clusterNode, COLON);
+ String port = substringAfter(clusterNode, COLON);
+ Assert.hasText(hostName, "[Assertion failed] missing host name in 'clusterNodes'");
+ Assert.hasText(port, "[Assertion failed] missing port in 'clusterNodes'");
+ logger.info("adding transport node : " + clusterNode);
+ client.addTransportAddress(new InetSocketTransportAddress(hostName, Integer.valueOf(port)));
+ }
+ client.connectedNodes();
+ }
- private Settings settings() {
- if (properties != null) {
- return settingsBuilder().put(properties).build();
- }
- return settingsBuilder()
- .put("cluster.name", clusterName)
- .put("client.transport.sniff", clientTransportSniff)
- .put("client.transport.ignore_cluster_name", clientIgnoreClusterName)
- .put("client.transport.ping_timeout", clientPingTimeout)
- .put("client.transport.nodes_sampler_interval", clientNodesSamplerInterval)
- .build();
- }
+ private Settings settings() {
+ if (properties != null) {
+ return settingsBuilder().put(properties).build();
+ }
+ return settingsBuilder()
+ .put("cluster.name", clusterName)
+ .put("client.transport.sniff", clientTransportSniff)
+ .put("client.transport.ignore_cluster_name", clientIgnoreClusterName)
+ .put("client.transport.ping_timeout", clientPingTimeout)
+ .put("client.transport.nodes_sampler_interval", clientNodesSamplerInterval)
+ .build();
+ }
- public void setClusterNodes(String clusterNodes) {
- this.clusterNodes = clusterNodes;
- }
+ public void setClusterNodes(String clusterNodes) {
+ this.clusterNodes = clusterNodes;
+ }
- public void setClusterName(String clusterName) {
- this.clusterName = clusterName;
- }
+ public void setClusterName(String clusterName) {
+ this.clusterName = clusterName;
+ }
- public void setClientTransportSniff(Boolean clientTransportSniff) {
- this.clientTransportSniff = clientTransportSniff;
- }
+ public void setClientTransportSniff(Boolean clientTransportSniff) {
+ this.clientTransportSniff = clientTransportSniff;
+ }
- public String getClientNodesSamplerInterval() {
- return clientNodesSamplerInterval;
- }
+ public String getClientNodesSamplerInterval() {
+ return clientNodesSamplerInterval;
+ }
- public void setClientNodesSamplerInterval(String clientNodesSamplerInterval) {
- this.clientNodesSamplerInterval = clientNodesSamplerInterval;
- }
+ public void setClientNodesSamplerInterval(String clientNodesSamplerInterval) {
+ this.clientNodesSamplerInterval = clientNodesSamplerInterval;
+ }
- public String getClientPingTimeout() {
- return clientPingTimeout;
- }
+ public String getClientPingTimeout() {
+ return clientPingTimeout;
+ }
- public void setClientPingTimeout(String clientPingTimeout) {
- this.clientPingTimeout = clientPingTimeout;
- }
+ public void setClientPingTimeout(String clientPingTimeout) {
+ this.clientPingTimeout = clientPingTimeout;
+ }
- public Boolean getClientIgnoreClusterName() {
- return clientIgnoreClusterName;
- }
+ public Boolean getClientIgnoreClusterName() {
+ return clientIgnoreClusterName;
+ }
- public void setClientIgnoreClusterName(Boolean clientIgnoreClusterName) {
- this.clientIgnoreClusterName = clientIgnoreClusterName;
- }
+ public void setClientIgnoreClusterName(Boolean clientIgnoreClusterName) {
+ this.clientIgnoreClusterName = clientIgnoreClusterName;
+ }
- public void setProperties(Properties properties) {
- this.properties = properties;
- }
+ public void setProperties(Properties properties) {
+ this.properties = properties;
+ }
}
diff --git a/src/main/java/org/springframework/data/elasticsearch/config/ElasticsearchNamespaceHandler.java b/src/main/java/org/springframework/data/elasticsearch/config/ElasticsearchNamespaceHandler.java
index 011ed164f..bfb65686a 100644
--- a/src/main/java/org/springframework/data/elasticsearch/config/ElasticsearchNamespaceHandler.java
+++ b/src/main/java/org/springframework/data/elasticsearch/config/ElasticsearchNamespaceHandler.java
@@ -22,7 +22,7 @@ import org.springframework.data.repository.config.RepositoryConfigurationExtensi
/**
* ElasticsearchNamespaceHandler
- *
+ *
* @author Rizwan Idrees
* @author Mohsin Husen
*/
diff --git a/src/main/java/org/springframework/data/elasticsearch/config/NodeClientBeanDefinitionParser.java b/src/main/java/org/springframework/data/elasticsearch/config/NodeClientBeanDefinitionParser.java
index 387032fcd..909ccfbbc 100644
--- a/src/main/java/org/springframework/data/elasticsearch/config/NodeClientBeanDefinitionParser.java
+++ b/src/main/java/org/springframework/data/elasticsearch/config/NodeClientBeanDefinitionParser.java
@@ -25,7 +25,7 @@ import org.w3c.dom.Element;
/**
* NodeClientBeanDefinitionParser
- *
+ *
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@@ -46,7 +46,7 @@ public class NodeClientBeanDefinitionParser extends AbstractBeanDefinitionParser
}
private AbstractBeanDefinition getSourcedBeanDefinition(BeanDefinitionBuilder builder, Element source,
- ParserContext context) {
+ ParserContext context) {
AbstractBeanDefinition definition = builder.getBeanDefinition();
definition.setSource(context.extractSource(source));
return definition;
diff --git a/src/main/java/org/springframework/data/elasticsearch/config/TransportClientBeanDefinitionParser.java b/src/main/java/org/springframework/data/elasticsearch/config/TransportClientBeanDefinitionParser.java
index b00451637..54679fa30 100644
--- a/src/main/java/org/springframework/data/elasticsearch/config/TransportClientBeanDefinitionParser.java
+++ b/src/main/java/org/springframework/data/elasticsearch/config/TransportClientBeanDefinitionParser.java
@@ -22,11 +22,9 @@ import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.data.elasticsearch.client.TransportClientFactoryBean;
import org.w3c.dom.Element;
-import static org.apache.commons.lang.StringUtils.split;
-
/**
* TransportClientBeanDefinitionParser
- *
+ *
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@@ -42,15 +40,15 @@ public class TransportClientBeanDefinitionParser extends AbstractBeanDefinitionP
private void setConfigurations(Element element, BeanDefinitionBuilder builder) {
builder.addPropertyValue("clusterNodes", element.getAttribute("cluster-nodes"));
- builder.addPropertyValue("clusterName", element.getAttribute("cluster-name"));
- builder.addPropertyValue("clientTransportSniff", Boolean.valueOf(element.getAttribute("client-transport-sniff")));
- builder.addPropertyValue("clientIgnoreClusterName", Boolean.valueOf(element.getAttribute("client-transport-ignore-cluster-name")));
- builder.addPropertyValue("clientPingTimeout", element.getAttribute("client-transport-ping-timeout"));
- builder.addPropertyValue("clientNodesSamplerInterval", element.getAttribute("client-transport-nodes-sampler-interval"));
+ builder.addPropertyValue("clusterName", element.getAttribute("cluster-name"));
+ builder.addPropertyValue("clientTransportSniff", Boolean.valueOf(element.getAttribute("client-transport-sniff")));
+ builder.addPropertyValue("clientIgnoreClusterName", Boolean.valueOf(element.getAttribute("client-transport-ignore-cluster-name")));
+ builder.addPropertyValue("clientPingTimeout", element.getAttribute("client-transport-ping-timeout"));
+ builder.addPropertyValue("clientNodesSamplerInterval", element.getAttribute("client-transport-nodes-sampler-interval"));
}
private AbstractBeanDefinition getSourcedBeanDefinition(BeanDefinitionBuilder builder, Element source,
- ParserContext context) {
+ ParserContext context) {
AbstractBeanDefinition definition = builder.getBeanDefinition();
definition.setSource(context.extractSource(source));
return definition;
diff --git a/src/main/java/org/springframework/data/elasticsearch/core/AbstractResultMapper.java b/src/main/java/org/springframework/data/elasticsearch/core/AbstractResultMapper.java
index 30d507a5d..0f8a16829 100644
--- a/src/main/java/org/springframework/data/elasticsearch/core/AbstractResultMapper.java
+++ b/src/main/java/org/springframework/data/elasticsearch/core/AbstractResultMapper.java
@@ -15,36 +15,36 @@
*/
package org.springframework.data.elasticsearch.core;
-import org.springframework.data.elasticsearch.ElasticsearchException;
+import static org.apache.commons.lang.StringUtils.*;
import java.io.IOException;
-import static org.apache.commons.lang.StringUtils.isBlank;
+import org.springframework.data.elasticsearch.ElasticsearchException;
/**
* @author Artur Konczak
*/
public abstract class AbstractResultMapper implements ResultsMapper {
- private EntityMapper entityMapper;
+ private EntityMapper entityMapper;
- public AbstractResultMapper(EntityMapper entityMapper) {
- this.entityMapper = entityMapper;
- }
+ public AbstractResultMapper(EntityMapper entityMapper) {
+ this.entityMapper = entityMapper;
+ }
- public T mapEntity(String source, Class clazz) {
- if (isBlank(source)) {
- return null;
- }
- try {
- return entityMapper.mapToObject(source, clazz);
- } catch (IOException e) {
- throw new ElasticsearchException("failed to map source [ " + source + "] to class " + clazz.getSimpleName(), e);
- }
- }
+ public T mapEntity(String source, Class clazz) {
+ if (isBlank(source)) {
+ return null;
+ }
+ try {
+ return entityMapper.mapToObject(source, clazz);
+ } catch (IOException e) {
+ throw new ElasticsearchException("failed to map source [ " + source + "] to class " + clazz.getSimpleName(), e);
+ }
+ }
- @Override
- public EntityMapper getEntityMapper() {
- return this.entityMapper;
- }
+ @Override
+ public EntityMapper getEntityMapper() {
+ return this.entityMapper;
+ }
}
diff --git a/src/main/java/org/springframework/data/elasticsearch/core/CriteriaFilterProcessor.java b/src/main/java/org/springframework/data/elasticsearch/core/CriteriaFilterProcessor.java
index 5f9c26ae1..fde394713 100644
--- a/src/main/java/org/springframework/data/elasticsearch/core/CriteriaFilterProcessor.java
+++ b/src/main/java/org/springframework/data/elasticsearch/core/CriteriaFilterProcessor.java
@@ -15,19 +15,21 @@
*/
package org.springframework.data.elasticsearch.core;
-import org.elasticsearch.index.query.*;
-import org.springframework.data.elasticsearch.core.geo.GeoBox;
-import org.springframework.data.elasticsearch.core.geo.GeoPoint;
-import org.springframework.data.elasticsearch.core.query.Criteria;
-import org.springframework.util.Assert;
+import static org.elasticsearch.index.query.FilterBuilders.*;
+import static org.springframework.data.elasticsearch.core.query.Criteria.*;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
-import static org.elasticsearch.index.query.FilterBuilders.*;
-import static org.springframework.data.elasticsearch.core.query.Criteria.OperationKey;
+import org.elasticsearch.index.query.FilterBuilder;
+import org.elasticsearch.index.query.GeoBoundingBoxFilterBuilder;
+import org.elasticsearch.index.query.GeoDistanceFilterBuilder;
+import org.springframework.data.elasticsearch.core.geo.GeoBox;
+import org.springframework.data.elasticsearch.core.geo.GeoPoint;
+import org.springframework.data.elasticsearch.core.query.Criteria;
+import org.springframework.util.Assert;
/**
* CriteriaFilterProcessor
@@ -37,160 +39,158 @@ import static org.springframework.data.elasticsearch.core.query.Criteria.Operati
class CriteriaFilterProcessor {
- FilterBuilder createFilterFromCriteria(Criteria criteria) {
- List fbList = new LinkedList();
- FilterBuilder filter = null;
+ FilterBuilder createFilterFromCriteria(Criteria criteria) {
+ List fbList = new LinkedList();
+ FilterBuilder filter = null;
- ListIterator chainIterator = criteria.getCriteriaChain().listIterator();
+ ListIterator chainIterator = criteria.getCriteriaChain().listIterator();
- while (chainIterator.hasNext()) {
- FilterBuilder fb = null;
- Criteria chainedCriteria = chainIterator.next();
- if (chainedCriteria.isOr()) {
- fb = orFilter(createFilterFragmentForCriteria(chainedCriteria).toArray(new FilterBuilder[]{}));
- fbList.add(fb);
- } else if (chainedCriteria.isNegating()) {
- List negationFilters = buildNegationFilter(criteria.getField().getName(), criteria.getFilterCriteriaEntries().iterator());
+ while (chainIterator.hasNext()) {
+ FilterBuilder fb = null;
+ Criteria chainedCriteria = chainIterator.next();
+ if (chainedCriteria.isOr()) {
+ fb = orFilter(createFilterFragmentForCriteria(chainedCriteria).toArray(new FilterBuilder[]{}));
+ fbList.add(fb);
+ } else if (chainedCriteria.isNegating()) {
+ List negationFilters = buildNegationFilter(criteria.getField().getName(), criteria.getFilterCriteriaEntries().iterator());
- if (!negationFilters.isEmpty()) {
- fbList.addAll(negationFilters);
- }
- } else {
- fbList.addAll(createFilterFragmentForCriteria(chainedCriteria));
- }
- }
+ if (!negationFilters.isEmpty()) {
+ fbList.addAll(negationFilters);
+ }
+ } else {
+ fbList.addAll(createFilterFragmentForCriteria(chainedCriteria));
+ }
+ }
- if (!fbList.isEmpty()) {
- if (fbList.size() == 1) {
- filter = fbList.get(0);
- } else {
- filter = andFilter(fbList.toArray(new FilterBuilder[]{}));
- }
- }
+ if (!fbList.isEmpty()) {
+ if (fbList.size() == 1) {
+ filter = fbList.get(0);
+ } else {
+ filter = andFilter(fbList.toArray(new FilterBuilder[]{}));
+ }
+ }
- return filter;
- }
+ return filter;
+ }
- private List createFilterFragmentForCriteria(Criteria chainedCriteria) {
- Iterator it = chainedCriteria.getFilterCriteriaEntries().iterator();
- List filterList = new LinkedList();
+ private List createFilterFragmentForCriteria(Criteria chainedCriteria) {
+ Iterator it = chainedCriteria.getFilterCriteriaEntries().iterator();
+ List filterList = new LinkedList();
- String fieldName = chainedCriteria.getField().getName();
- Assert.notNull(fieldName, "Unknown field");
- FilterBuilder filter = null;
+ String fieldName = chainedCriteria.getField().getName();
+ Assert.notNull(fieldName, "Unknown field");
+ FilterBuilder filter = null;
- while (it.hasNext()) {
- Criteria.CriteriaEntry entry = it.next();
- filter = processCriteriaEntry(entry.getKey(), entry.getValue(), fieldName);
- filterList.add(filter);
- }
+ while (it.hasNext()) {
+ Criteria.CriteriaEntry entry = it.next();
+ filter = processCriteriaEntry(entry.getKey(), entry.getValue(), fieldName);
+ filterList.add(filter);
+ }
- return filterList;
- }
+ return filterList;
+ }
- private FilterBuilder processCriteriaEntry(OperationKey key, Object value, String fieldName) {
- if (value == null) {
- return null;
- }
- FilterBuilder filter = null;
+ private FilterBuilder processCriteriaEntry(OperationKey key, Object value, String fieldName) {
+ if (value == null) {
+ return null;
+ }
+ FilterBuilder filter = null;
- switch (key) {
- case WITHIN: {
- filter = geoDistanceFilter(fieldName);
+ switch (key) {
+ case WITHIN: {
+ filter = geoDistanceFilter(fieldName);
- Assert.isTrue(value instanceof Object[], "Value of a geo distance filter should be an array of two values.");
- Object[] valArray = (Object[]) value;
- Assert.noNullElements(valArray, "Geo distance filter takes 2 not null elements array as parameter.");
- Assert.isTrue(valArray.length == 2, "Geo distance filter takes a 2-elements array as parameter.");
- Assert.isTrue(valArray[0] instanceof GeoPoint || valArray[0] instanceof String, "First element of a geo distance filter must be a GeoLocation or String");
- Assert.isTrue(valArray[1] instanceof String, "Second element of a geo distance filter must be a String");
+ Assert.isTrue(value instanceof Object[], "Value of a geo distance filter should be an array of two values.");
+ Object[] valArray = (Object[]) value;
+ Assert.noNullElements(valArray, "Geo distance filter takes 2 not null elements array as parameter.");
+ Assert.isTrue(valArray.length == 2, "Geo distance filter takes a 2-elements array as parameter.");
+ Assert.isTrue(valArray[0] instanceof GeoPoint || valArray[0] instanceof String, "First element of a geo distance filter must be a GeoLocation or String");
+ Assert.isTrue(valArray[1] instanceof String, "Second element of a geo distance filter must be a String");
- String dist = (String) valArray[1];
- if (valArray[0] instanceof GeoPoint) {
- GeoPoint loc = (GeoPoint) valArray[0];
- ((GeoDistanceFilterBuilder) filter).lat(loc.getLat()).lon(loc.getLon()).distance(dist);
- } else {
- String loc = (String) valArray[0];
- if (loc.contains(",")) {
- String c[] = loc.split(",");
- ((GeoDistanceFilterBuilder) filter).lat(Double.parseDouble(c[0])).lon(Double.parseDouble(c[1])).distance(dist);
- } else {
- ((GeoDistanceFilterBuilder) filter).geohash(loc).distance(dist);
- }
+ String dist = (String) valArray[1];
+ if (valArray[0] instanceof GeoPoint) {
+ GeoPoint loc = (GeoPoint) valArray[0];
+ ((GeoDistanceFilterBuilder) filter).lat(loc.getLat()).lon(loc.getLon()).distance(dist);
+ } else {
+ String loc = (String) valArray[0];
+ if (loc.contains(",")) {
+ String c[] = loc.split(",");
+ ((GeoDistanceFilterBuilder) filter).lat(Double.parseDouble(c[0])).lon(Double.parseDouble(c[1])).distance(dist);
+ } else {
+ ((GeoDistanceFilterBuilder) filter).geohash(loc).distance(dist);
+ }
+ }
- }
+ break;
+ }
- break;
- }
+ case BBOX: {
+ filter = geoBoundingBoxFilter(fieldName);
- case BBOX: {
- filter = geoBoundingBoxFilter(fieldName);
+ Assert.isTrue(value instanceof Object[], "Value of a boundedBy filter should be an array of one or two values.");
+ Object[] valArray = (Object[]) value;
+ Assert.noNullElements(valArray, "Geo boundedBy filter takes a not null element array as parameter.");
- Assert.isTrue(value instanceof Object[], "Value of a boundedBy filter should be an array of one or two values.");
- Object[] valArray = (Object[]) value;
- Assert.noNullElements(valArray, "Geo boundedBy filter takes a not null element array as parameter.");
+ if (valArray.length == 1) {
+ //GeoEnvelop
+ oneParameterBBox((GeoBoundingBoxFilterBuilder) filter, valArray[0]);
+ } else if (valArray.length == 2) {
+ //2x GeoPoint
+ //2x String
+ twoParameterBBox((GeoBoundingBoxFilterBuilder) filter, valArray);
+ } else {
+ //error
+ Assert.isTrue(false, "Geo distance filter takes a 1-elements array(GeoBox) or 2-elements array(GeoPoints or Strings(format lat,lon or geohash)).");
+ }
+ break;
+ }
+ }
- if (valArray.length == 1) {
- //GeoEnvelop
- oneParameterBBox((GeoBoundingBoxFilterBuilder) filter, valArray[0]);
- } else if (valArray.length == 2) {
- //2x GeoPoint
- //2x String
- twoParameterBBox((GeoBoundingBoxFilterBuilder) filter, valArray);
- } else {
- //error
- Assert.isTrue(false, "Geo distance filter takes a 1-elements array(GeoBox) or 2-elements array(GeoPoints or Strings(format lat,lon or geohash)).");
- }
- break;
- }
+ return filter;
+ }
- }
+ private void oneParameterBBox(GeoBoundingBoxFilterBuilder filter, Object value) {
+ Assert.isTrue(value instanceof GeoBox, "single-element of boundedBy filter must be type of GeoBox");
+ GeoBox geoBBox = (GeoBox) value;
+ filter.topLeft(geoBBox.getTopLeft().getLat(), geoBBox.getTopLeft().getLon());
+ filter.bottomRight(geoBBox.getBottomRight().getLat(), geoBBox.getBottomRight().getLon());
+ }
- return filter;
- }
+ private static boolean isType(Object[] array, Class clazz) {
+ for (Object o : array) {
+ if (!clazz.isInstance(o)) {
+ return false;
+ }
+ }
+ return true;
+ }
- private void oneParameterBBox(GeoBoundingBoxFilterBuilder filter, Object value) {
- Assert.isTrue(value instanceof GeoBox, "single-element of boundedBy filter must be type of GeoBox");
- GeoBox geoBBox = (GeoBox) value;
- filter.topLeft(geoBBox.getTopLeft().getLat(), geoBBox.getTopLeft().getLon());
- filter.bottomRight(geoBBox.getBottomRight().getLat(), geoBBox.getBottomRight().getLon());
- }
+ private void twoParameterBBox(GeoBoundingBoxFilterBuilder filter, Object[] values) {
+ Assert.isTrue(isType(values, GeoPoint.class) || isType(values, String.class), " both elements of boundedBy filter must be type of GeoPoint or String(format lat,lon or geohash)");
+ if (values[0] instanceof GeoPoint) {
+ GeoPoint topLeft = (GeoPoint) values[0];
+ GeoPoint bottomRight = (GeoPoint) values[1];
+ filter.topLeft(topLeft.getLat(), topLeft.getLon());
+ filter.bottomRight(bottomRight.getLat(), bottomRight.getLon());
+ } else {
+ String topLeft = (String) values[0];
+ String bottomRight = (String) values[1];
+ filter.topLeft(topLeft);
+ filter.bottomRight(bottomRight);
+ }
+ }
- private static boolean isType(Object[] array, Class clazz) {
- for (Object o : array) {
- if (!clazz.isInstance(o)) {
- return false;
- }
- }
- return true;
- }
+ private List buildNegationFilter(String fieldName, Iterator it) {
+ List notFilterList = new LinkedList();
- private void twoParameterBBox(GeoBoundingBoxFilterBuilder filter, Object[] values) {
- Assert.isTrue(isType(values, GeoPoint.class) || isType(values, String.class), " both elements of boundedBy filter must be type of GeoPoint or String(format lat,lon or geohash)");
- if (values[0] instanceof GeoPoint) {
- GeoPoint topLeft = (GeoPoint) values[0];
- GeoPoint bottomRight = (GeoPoint) values[1];
- filter.topLeft(topLeft.getLat(), topLeft.getLon());
- filter.bottomRight(bottomRight.getLat(), bottomRight.getLon());
- } else {
- String topLeft = (String) values[0];
- String bottomRight = (String) values[1];
- filter.topLeft(topLeft);
- filter.bottomRight(bottomRight);
- }
- }
+ while (it.hasNext()) {
+ Criteria.CriteriaEntry criteriaEntry = it.next();
+ FilterBuilder notFilter = notFilter(processCriteriaEntry(criteriaEntry.getKey(), criteriaEntry.getValue(), fieldName));
+ notFilterList.add(notFilter);
+ }
- private List buildNegationFilter(String fieldName, Iterator it) {
- List notFilterList = new LinkedList();
-
- while (it.hasNext()) {
- Criteria.CriteriaEntry criteriaEntry = it.next();
- FilterBuilder notFilter = notFilter(processCriteriaEntry(criteriaEntry.getKey(), criteriaEntry.getValue(), fieldName));
- notFilterList.add(notFilter);
- }
-
- return notFilterList;
- }
+ return notFilterList;
+ }
}
diff --git a/src/main/java/org/springframework/data/elasticsearch/core/CriteriaQueryProcessor.java b/src/main/java/org/springframework/data/elasticsearch/core/CriteriaQueryProcessor.java
index d33259183..ba61a8316 100644
--- a/src/main/java/org/springframework/data/elasticsearch/core/CriteriaQueryProcessor.java
+++ b/src/main/java/org/springframework/data/elasticsearch/core/CriteriaQueryProcessor.java
@@ -15,23 +15,23 @@
*/
package org.springframework.data.elasticsearch.core;
-import org.elasticsearch.index.query.BoolQueryBuilder;
-import org.elasticsearch.index.query.BoostableQueryBuilder;
-import org.elasticsearch.index.query.QueryBuilder;
-import org.springframework.data.elasticsearch.core.query.Criteria;
-import org.springframework.util.Assert;
+import static org.elasticsearch.index.query.QueryBuilders.*;
+import static org.springframework.data.elasticsearch.core.query.Criteria.*;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
-import static org.elasticsearch.index.query.QueryBuilders.*;
-import static org.springframework.data.elasticsearch.core.query.Criteria.OperationKey;
+import org.elasticsearch.index.query.BoolQueryBuilder;
+import org.elasticsearch.index.query.BoostableQueryBuilder;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.springframework.data.elasticsearch.core.query.Criteria;
+import org.springframework.util.Assert;
/**
* CriteriaQueryProcessor
- *
+ *
* @author Rizwan Idrees
* @author Mohsin Husen
* @author Franck Marchand
@@ -39,136 +39,133 @@ import static org.springframework.data.elasticsearch.core.query.Criteria.Operati
class CriteriaQueryProcessor {
- QueryBuilder createQueryFromCriteria(Criteria criteria) {
- if(criteria == null)
- return null;
+ QueryBuilder createQueryFromCriteria(Criteria criteria) {
+ if (criteria == null)
+ return null;
- List shouldQueryBuilderList = new LinkedList();
- List mustNotQueryBuilderList = new LinkedList();
- List mustQueryBuilderList = new LinkedList();
+ List shouldQueryBuilderList = new LinkedList();
+ List mustNotQueryBuilderList = new LinkedList();
+ List mustQueryBuilderList = new LinkedList();
+
+ ListIterator chainIterator = criteria.getCriteriaChain().listIterator();
+ while (chainIterator.hasNext()) {
+ Criteria chainedCriteria = chainIterator.next();
+ QueryBuilder queryFragmentForCriteria = createQueryFragmentForCriteria(chainedCriteria);
+
+ if (queryFragmentForCriteria != null) {
+ if (chainedCriteria.isOr()) {
+ shouldQueryBuilderList.add(queryFragmentForCriteria);
+ } else if (chainedCriteria.isNegating()) {
+ mustNotQueryBuilderList.add(queryFragmentForCriteria);
+ } else {
+ mustQueryBuilderList.add(queryFragmentForCriteria);
+ }
+ }
+ }
+
+ BoolQueryBuilder query = null;
+
+ if (!shouldQueryBuilderList.isEmpty() || !mustNotQueryBuilderList.isEmpty() || !mustQueryBuilderList.isEmpty()) {
+
+ query = boolQuery();
+
+ for (QueryBuilder qb : shouldQueryBuilderList) {
+ query.should(qb);
+ }
+ for (QueryBuilder qb : mustNotQueryBuilderList) {
+ query.mustNot(qb);
+ }
+ for (QueryBuilder qb : mustQueryBuilderList) {
+ query.must(qb);
+ }
+ }
+
+ return query;
+ }
- ListIterator chainIterator = criteria.getCriteriaChain().listIterator();
- while (chainIterator.hasNext()) {
- Criteria chainedCriteria = chainIterator.next();
- QueryBuilder queryFragmentForCriteria = createQueryFragmentForCriteria(chainedCriteria);
+ private QueryBuilder createQueryFragmentForCriteria(Criteria chainedCriteria) {
+ if (chainedCriteria.getQueryCriteriaEntries().isEmpty())
+ return null;
- if(queryFragmentForCriteria!=null) {
- if(chainedCriteria.isOr()){
- shouldQueryBuilderList.add(queryFragmentForCriteria);
- }else if(chainedCriteria.isNegating()){
- mustNotQueryBuilderList.add(queryFragmentForCriteria);
- }else{
- mustQueryBuilderList.add(queryFragmentForCriteria);
- }
- }
- }
+ Iterator it = chainedCriteria.getQueryCriteriaEntries().iterator();
+ boolean singeEntryCriteria = (chainedCriteria.getQueryCriteriaEntries().size() == 1);
- BoolQueryBuilder query = null;
+ String fieldName = chainedCriteria.getField().getName();
+ Assert.notNull(fieldName, "Unknown field");
+ QueryBuilder query = null;
- if(!shouldQueryBuilderList.isEmpty() || !mustNotQueryBuilderList.isEmpty() || !mustQueryBuilderList.isEmpty()) {
+ if (singeEntryCriteria) {
+ Criteria.CriteriaEntry entry = it.next();
+ query = processCriteriaEntry(entry.getKey(), entry.getValue(), fieldName);
+ } else {
+ query = boolQuery();
+ while (it.hasNext()) {
+ Criteria.CriteriaEntry entry = it.next();
+ ((BoolQueryBuilder) query).must(processCriteriaEntry(entry.getKey(), entry.getValue(), fieldName));
+ }
+ }
- query = boolQuery();
-
- for(QueryBuilder qb : shouldQueryBuilderList) {
- query.should(qb);
- }
- for(QueryBuilder qb : mustNotQueryBuilderList) {
- query.mustNot(qb);
- }
- for(QueryBuilder qb : mustQueryBuilderList) {
- query.must(qb);
- }
- }
-
- return query;
- }
+ addBoost(query, chainedCriteria.getBoost());
+ return query;
+ }
- private QueryBuilder createQueryFragmentForCriteria(Criteria chainedCriteria) {
- if(chainedCriteria.getQueryCriteriaEntries().isEmpty())
- return null;
+ private QueryBuilder processCriteriaEntry(OperationKey key, Object value, String fieldName) {
+ if (value == null) {
+ return null;
+ }
+ QueryBuilder query = null;
- Iterator it = chainedCriteria.getQueryCriteriaEntries().iterator();
- boolean singeEntryCriteria = (chainedCriteria.getQueryCriteriaEntries().size() == 1);
+ switch (key) {
+ case EQUALS:
+ query = fieldQuery(fieldName, value);
+ break;
+ case CONTAINS:
+ query = fieldQuery(fieldName, "*" + value + "*").analyzeWildcard(true);
+ break;
+ case STARTS_WITH:
+ query = fieldQuery(fieldName, value + "*").analyzeWildcard(true);
+ break;
+ case ENDS_WITH:
+ query = fieldQuery(fieldName, "*" + value).analyzeWildcard(true);
+ break;
+ case EXPRESSION:
+ query = queryString((String) value).field(fieldName);
+ break;
+ case BETWEEN:
+ Object[] ranges = (Object[]) value;
+ query = rangeQuery(fieldName).from(ranges[0]).to(ranges[1]);
+ break;
+ case FUZZY:
+ query = fuzzyQuery(fieldName, (String) value);
+ break;
+ case IN:
+ query = boolQuery();
+ Iterable