DATAES-2 - all code reformatted to follow Spring Data code rules

Artur Konczak 2014-02-11 00:44:03 +00:00
parent f768c1698e
commit e9a260235d
183 changed files with 8023 additions and 8875 deletions


@@ -133,7 +133,7 @@ Searching entities using Elasticsearch Template
Indexing a single document with Repository
```java
@Resource
@Autowired
private SampleElasticsearchRepository repository;
String documentId = "123456";
@@ -147,7 +147,7 @@ Indexing a single document with Repository
Indexing multiple documents (bulk index) using Repository
```java
@Resource
@Autowired
private SampleElasticsearchRepository repository;
String documentId = "123456";
@@ -371,6 +371,11 @@ Here are some ways for you to get involved in the community:
Before we accept a non-trivial patch or pull request we will need you to sign the [contributor's agreement](https://support.springsource.com/spring_committer_signup). Signing the contributor's agreement does not grant anyone commit rights to the main repository, but it does mean that we can accept your contributions, and you will get an author credit if we do. Active contributors might be asked to join the core team, and given the ability to merge pull requests.
Code formatting for [Eclipse and Intellij](https://github.com/spring-projects/spring-data-build/tree/master/etc/ide)
[More information about contributing to Spring Data](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.md)
### Contact Details
* Rizwan Idrees (rizwan.idrees@biomedcentral.com)
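
For context, the README hunks above only change the injection annotation from `@Resource` to `@Autowired`; the surrounding snippet is otherwise unchanged. A minimal sketch of how the indexing examples read in full (assuming a `SampleEntity` document class with `id` and `message` properties, which is not shown in this excerpt) might be:

```java
@Autowired
private SampleElasticsearchRepository repository;

// index a single document
String documentId = "123456";
SampleEntity sampleEntity = new SampleEntity(); // hypothetical entity class
sampleEntity.setId(documentId);
sampleEntity.setMessage("some message");
repository.save(sampleEntity);

// bulk index: CrudRepository.save(Iterable) indexes the whole batch
List<SampleEntity> entities = Arrays.asList(sampleEntity);
repository.save(entities);
```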


@@ -1,820 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<profiles version="12">
<profile kind="CodeFormatterProfile" name="Spring Data"
version="12">
<setting
id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags"
value="insert" />
<setting id="org.eclipse.jdt.core.formatter.disabling_tag"
value="@formatter:off" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration"
value="end_of_line" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation"
value="do not insert" />
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field"
value="0" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while"
value="do not insert" />
<setting id="org.eclipse.jdt.core.formatter.use_on_off_tags"
value="false" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line"
value="false" />
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields"
value="16" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer"
value="16" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression"
value="80" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_binary_operator"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer"
value="end_of_line" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while"
value="insert" />
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package"
value="1" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters"
value="insert" />
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation"
value="2" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation"
value="16" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces"
value="do not insert" />
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk"
value="1" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_binary_operator"
value="insert" />
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package"
value="0" />
<setting id="org.eclipse.jdt.core.compiler.source" value="1.7" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration"
value="do not insert" />
<setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations"
value="insert" />
<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call"
value="16" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type"
value="1" />
<setting
id="org.eclipse.jdt.core.formatter.align_type_members_on_columns"
value="false" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation"
value="16" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment"
value="false" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration"
value="insert" />
<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="120" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration"
value="0" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method"
value="insert" />
<setting id="org.eclipse.jdt.core.formatter.indentation.size"
value="2" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration"
value="do not insert" />
<setting id="org.eclipse.jdt.core.formatter.enabling_tag"
value="@formatter:on" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration"
value="16" />
<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment"
value="0" />
<setting id="org.eclipse.jdt.core.compiler.problem.assertIdentifier"
value="error" />
<setting id="org.eclipse.jdt.core.formatter.tabulation.char"
value="tab" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body"
value="true" />
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method"
value="1" />
<setting
id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line"
value="false" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration"
value="16" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration"
value="end_of_line" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration"
value="0" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if"
value="do not insert" />
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch"
value="end_of_line" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments"
value="false" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments"
value="do not insert" />
<setting id="org.eclipse.jdt.core.compiler.problem.enumIdentifier"
value="error" />
<setting
id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch"
value="false" />
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis"
value="do not insert" />
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block"
value="end_of_line" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration"
value="end_of_line" />
<setting id="org.eclipse.jdt.core.formatter.compact_else_if"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant"
value="end_of_line" />
<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch"
value="16" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression"
value="do not insert" />
<setting id="org.eclipse.jdt.core.formatter.tabulation.size"
value="2" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment"
value="false" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration"
value="16" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator"
value="insert" />
<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines"
value="false" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case"
value="end_of_line" />
<setting
id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve"
value="1" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression"
value="16" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter"
value="insert" />
<setting id="org.eclipse.jdt.core.compiler.compliance" value="1.7" />
<setting
id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer"
value="2" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression"
value="16" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line"
value="false" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration"
value="insert" />
<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line"
value="false" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration"
value="16" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_binary_expression"
value="16" />
<setting
id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration"
value="end_of_line" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while"
value="do not insert" />
<setting id="org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode"
value="enabled" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line"
value="true" />
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant"
value="16" />
<setting
id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments"
value="true" />
<setting id="org.eclipse.jdt.core.formatter.comment.line_length"
value="120" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups"
value="1" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration"
value="end_of_line" />
<setting
id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body"
value="0" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration"
value="do not insert" />
<setting id="org.eclipse.jdt.core.formatter.wrap_before_binary_operator"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations"
value="1" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration"
value="16" />
<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations"
value="do not insert" />
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if"
value="16" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration"
value="do not insert" />
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports"
value="1" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert"
value="insert" />
<setting id="org.eclipse.jdt.core.formatter.comment.format_html"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration"
value="16" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator"
value="do not insert" />
<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration"
value="16" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer"
value="insert" />
<setting id="org.eclipse.jdt.core.compiler.codegen.targetPlatform"
value="1.7" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_resources_in_try"
value="80" />
<setting
id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations"
value="false" />
<setting
id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation"
value="0" />
<setting id="org.eclipse.jdt.core.formatter.comment.format_header"
value="false" />
<setting
id="org.eclipse.jdt.core.formatter.comment.format_block_comments"
value="false" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant"
value="do not insert" />
<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants"
value="0" />
<setting
id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration"
value="end_of_line" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries"
value="true" />
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports"
value="1" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header"
value="true" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for"
value="insert" />
<setting
id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column"
value="false" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments"
value="do not insert" />
<setting
id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column"
value="false" />
<setting
id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line"
value="false" />
</profile>
</profiles>
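
The deleted Eclipse profile above is long, but the visible effect of its key settings is simple: tab indentation (`tabulation.char=tab`, size 2), a 120-character line limit (`lineSplit=120`), braces at the end of the line, one blank line before methods, and a space after commas and around binary operators. A small, hypothetical class formatted under those rules would look roughly like this (tabs shown as indentation):

```java
public class FormattingSample {

	private final String name;

	public FormattingSample(String name) {
		this.name = name;
	}

	public String describe(int count, boolean verbose) {
		// end-of-line braces, spaces after commas and around binary/conditional operators
		return verbose ? name + " x " + count : name;
	}
}
```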

pom.xml

@@ -1,5 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.springframework.data</groupId>
@@ -152,13 +153,13 @@
</repositories>
<pluginRepositories>
<pluginRepository>
<id>spring-plugins-release</id>
<url>http://repo.spring.io/plugins-release</url>
</pluginRepository>
</pluginRepositories>
<pluginRepository>
<id>spring-plugins-release</id>
<url>http://repo.spring.io/plugins-release</url>
</pluginRepository>
</pluginRepositories>
<scm>
<scm>
<url>https://github.com/SpringSource/spring-data-elasticsearch</url>
<connection>scm:git:git://github.com/SpringSource/spring-data-elasticsearch.git</connection>
<developerConnection>scm:git:ssh://git@github.com:SpringSource/spring-data-elasticsearch.git


@@ -1,62 +1,62 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE book PUBLIC "-//OASIS//DTD DocBook XML V4.4//EN"
"http://www.oasis-open.org/docbook/xml/4.4/docbookx.dtd">
"http://www.oasis-open.org/docbook/xml/4.4/docbookx.dtd">
<book xmlns:xi="http://www.w3.org/2001/XInclude">
<bookinfo>
<title>Spring Data Elasticsearch</title>
<authorgroup>
<author>
<firstname>BioMed Central</firstname>
<surname>Development Team</surname>
</author>
</authorgroup>
<legalnotice>
<para>
Copies of this document may be made for your own use and for
distribution to others, provided that you do not
charge any fee for
such copies and further provided that each copy
contains this
Copyright Notice, whether
distributed in print or electronically.
</para>
</legalnotice>
<bookinfo>
<title>Spring Data Elasticsearch</title>
<authorgroup>
<author>
<firstname>BioMed Central</firstname>
<surname>Development Team</surname>
</author>
</authorgroup>
<legalnotice>
<para>
Copies of this document may be made for your own use and for
distribution to others, provided that you do not
charge any fee for
such copies and further provided that each copy
contains this
Copyright Notice, whether
distributed in print or electronically.
</para>
</legalnotice>
<copyright>
<year>2013</year>
<holder>The original author(s)</holder>
</copyright>
</bookinfo>
<copyright>
<year>2013</year>
<holder>The original author(s)</holder>
</copyright>
</bookinfo>
<toc />
<toc/>
<xi:include href="preface.xml" />
<xi:include href="preface.xml"/>
<part id="reference">
<title>Reference Documentation</title>
<part id="reference">
<title>Reference Documentation</title>
<xi:include
href="https://raw.github.com/SpringSource/spring-data-commons/1.4.0.RC1/src/docbkx/repositories.xml">
<xi:fallback
href="../../../spring-data-commons/src/docbkx/repositories.xml" />
</xi:include>
<xi:include
href="https://raw.github.com/SpringSource/spring-data-commons/1.4.0.RC1/src/docbkx/repositories.xml">
<xi:fallback
href="../../../spring-data-commons/src/docbkx/repositories.xml"/>
</xi:include>
<xi:include href="reference/data-elasticsearch.xml" />
<xi:include href="reference/elasticsearch-misc.xml" />
</part>
<xi:include href="reference/data-elasticsearch.xml"/>
<xi:include href="reference/elasticsearch-misc.xml"/>
</part>
<part id="appendix">
<title>Appendix</title>
<xi:include
href="https://raw.github.com/SpringSource/spring-data-commons/1.4.0.RC1/src/docbkx/repository-namespace-reference.xml">
<xi:fallback
href="../../../spring-data-commons/src/docbkx/repository-namespace-reference.xml" />
</xi:include>
<xi:include
href="https://raw.github.com/SpringSource/spring-data-commons/1.4.0.RC1/src/docbkx/repository-query-keywords-reference.xml">
<xi:fallback
href="../../../spring-data-commons/src/docbkx/repository-query-keywords-reference.xml" />
</xi:include>
</part>
<part id="appendix">
<title>Appendix</title>
<xi:include
href="https://raw.github.com/SpringSource/spring-data-commons/1.4.0.RC1/src/docbkx/repository-namespace-reference.xml">
<xi:fallback
href="../../../spring-data-commons/src/docbkx/repository-namespace-reference.xml"/>
</xi:include>
<xi:include
href="https://raw.github.com/SpringSource/spring-data-commons/1.4.0.RC1/src/docbkx/repository-query-keywords-reference.xml">
<xi:fallback
href="../../../spring-data-commons/src/docbkx/repository-query-keywords-reference.xml"/>
</xi:include>
</part>
</book>


@@ -1,39 +1,40 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE preface PUBLIC "-//OASIS//DTD DocBook XML V4.4//EN"
"http://www.oasis-open.org/docbook/xml/4.4/docbookx.dtd">
"http://www.oasis-open.org/docbook/xml/4.4/docbookx.dtd">
<preface id="preface">
<title>Preface</title>
<para>The Spring Data Elasticsearch project applies core Spring
concepts to
the
development of solutions using the Elasticsearch Search
Engine.
We have provided a "template" as a high-level abstraction for
storing, querying, sorting and faceting documents. You will notice
similarities
to the Spring Data Solr and
MongoDB support in the Spring Framework.
</para>
<section id="project">
<title>Project Metadata</title>
<itemizedlist spacing="compact">
<listitem>
<para>
Version Control -
<ulink
url="git://github.com/BioMedCentralLtd/spring-data-elasticsearch.git">git://github.com/BioMedCentralLtd/spring-data-elasticsearch.git
</ulink>
</para>
</listitem>
</itemizedlist>
</section>
<section id="requirements">
<title>Requirements</title>
<para>
Requires
<ulink url="http://www.elasticsearch.org/download/">Elasticsearch</ulink>
0.20.2 and above as an optional dependency, or not even that if you are
using the embedded Node Client
</para>
</section>
<title>Preface</title>
<para>The Spring Data Elasticsearch project applies core Spring
concepts to
the
development of solutions using the Elasticsearch Search
Engine.
We have provided a "template" as a high-level abstraction for
storing, querying, sorting and faceting documents. You will notice
similarities
to the Spring Data Solr and
MongoDB support in the Spring Framework.
</para>
<section id="project">
<title>Project Metadata</title>
<itemizedlist spacing="compact">
<listitem>
<para>
Version Control -
<ulink
url="git://github.com/BioMedCentralLtd/spring-data-elasticsearch.git">
git://github.com/BioMedCentralLtd/spring-data-elasticsearch.git
</ulink>
</para>
</listitem>
</itemizedlist>
</section>
<section id="requirements">
<title>Requirements</title>
<para>
Requires
<ulink url="http://www.elasticsearch.org/download/">Elasticsearch</ulink>
0.20.2 and above as an optional dependency, or not even that if you are
using the embedded Node Client
</para>
</section>
</preface>


@@ -1,487 +1,508 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.4//EN"
"http://www.oasis-open.org/docbook/xml/4.4/docbookx.dtd">
"http://www.oasis-open.org/docbook/xml/4.4/docbookx.dtd">
<chapter id="elasticsearch.repositories">
<title>Elasticsearch Repositories</title>
<abstract>
<para>This chapter includes details of the Elasticsearch repository
implementation.
</para>
</abstract>
<section id="elasticsearch.introduction">
<title>Introduction</title>
<title>Elasticsearch Repositories</title>
<abstract>
<para>This chapter includes details of the Elasticsearch repository
implementation.
</para>
</abstract>
<section id="elasticsearch.introduction">
<title>Introduction</title>
<section id="elasticsearch.namespace">
<title>Spring Namespace</title>
<section id="elasticsearch.namespace">
<title>Spring Namespace</title>
<para>
The Spring Data Elasticsearch module contains a custom namespace
allowing
definition of repository beans as well as elements for
instantiating
a
<classname>ElasticsearchServer</classname>
.
</para>
<para>
The Spring Data Elasticsearch module contains a custom namespace
allowing
definition of repository beans as well as elements for
instantiating
a
<classname>ElasticsearchServer</classname>
.
</para>
<para>
Using the
<code>repositories</code>
element looks up Spring Data repositories as described in
<xref linkend="repositories.create-instances" />
.
</para>
<para>
Using the
<code>repositories</code>
element looks up Spring Data repositories as described in
<xref linkend="repositories.create-instances"/>
.
</para>
<example>
<title>Setting up Elasticsearch repositories using Namespace</title>
<programlisting language="xml">&lt;?xml version="1.0" encoding="UTF-8"?&gt;
&lt;beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:elasticsearch="http://www.springframework.org/schema/data/elasticsearch"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://www.springframework.org/schema/data/elasticsearch
http://www.springframework.org/schema/data/elasticsearch/spring-elasticsearch-1.0.xsd"&gt;
<example>
<title>Setting up Elasticsearch repositories using Namespace</title>
<programlisting language="xml">&lt;?xml version="1.0" encoding="UTF-8"?&gt;
&lt;beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:elasticsearch="http://www.springframework.org/schema/data/elasticsearch"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://www.springframework.org/schema/data/elasticsearch
http://www.springframework.org/schema/data/elasticsearch/spring-elasticsearch-1.0.xsd"&gt;
&lt;elasticsearch:repositories base-package="com.acme.repositories" /&gt;
&lt;/beans&gt;</programlisting>
</example>
&lt;elasticsearch:repositories base-package="com.acme.repositories" /&gt;
&lt;/beans&gt;</programlisting>
</example>
<para>
Using the
<code>Transport Client</code>
or
<code>Node Client</code>
element registers an instance of
<code>Elasticsearch Server</code>
in the context.
<para>
Using the
<code>Transport Client</code>
or
<code>Node Client</code>
element registers an instance of
<code>Elasticsearch Server</code>
in the context.
<example>
<title>Transport Client using Namespace</title>
<programlisting language="xml">&lt;?xml version="1.0" encoding="UTF-8"?&gt;
&lt;beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:elasticsearch="http://www.springframework.org/schema/data/elasticsearch"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://www.springframework.org/schema/data/elasticsearch
http://www.springframework.org/schema/data/elasticsearch/spring-elasticsearch-1.0.xsd"&gt;
<example>
<title>Transport Client using Namespace</title>
<programlisting language="xml">&lt;?xml version="1.0" encoding="UTF-8"?&gt;
&lt;beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:elasticsearch="http://www.springframework.org/schema/data/elasticsearch"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://www.springframework.org/schema/data/elasticsearch
http://www.springframework.org/schema/data/elasticsearch/spring-elasticsearch-1.0.xsd"&gt;
&lt;elasticsearch:transport-client id="client" cluster-nodes="localhost:9300,someip:9300" /&gt;
&lt;/beans&gt; </programlisting>
</example>
&lt;elasticsearch:transport-client id="client" cluster-nodes="localhost:9300,someip:9300" /&gt;
&lt;/beans&gt; </programlisting>
</example>
<example>
<title>Node Client using Namespace</title>
<programlisting language="xml">&lt;?xml version="1.0" encoding="UTF-8"?&gt;
&lt;beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:elasticsearch="http://www.springframework.org/schema/data/elasticsearch"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://www.springframework.org/schema/data/elasticsearch
http://www.springframework.org/schema/data/elasticsearch/spring-elasticsearch-1.0.xsd"&gt;
<example>
<title>Node Client using Namespace</title>
<programlisting language="xml">&lt;?xml version="1.0" encoding="UTF-8"?&gt;
&lt;beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:elasticsearch="http://www.springframework.org/schema/data/elasticsearch"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://www.springframework.org/schema/data/elasticsearch
http://www.springframework.org/schema/data/elasticsearch/spring-elasticsearch-1.0.xsd"&gt;
&lt;elasticsearch:node-client id="client" local="true" /&gt;
&lt;/beans&gt; </programlisting>
</example>
</para>
</section>
<section id="elasticsearch.annotation">
<title>Annotation based configuration</title>
<para>The Spring Data Elasticsearch repositories support can not only
be
activated through an XML namespace but also by using annotations
through JavaConfig.
</para>
<example>
<title>Spring Data Elasticsearch repositories using JavaConfig
</title>
<programlisting language="java">
@Configuration
@EnableElasticsearchRepositories(basePackages = "org/springframework/data/elasticsearch/repositories")
static class Config {
&lt;elasticsearch:node-client id="client" local="true" /&gt;
&lt;/beans&gt; </programlisting>
</example>
</para>
</section>
<section id="elasticsearch.annotation">
<title>Annotation based configuration</title>
<para>The Spring Data Elasticsearch repositories support can not only
be
activated through an XML namespace but also by using annotations
through JavaConfig.
</para>
<example>
<title>Spring Data Elasticsearch repositories using JavaConfig
</title>
<programlisting language="java">
@Configuration
@EnableElasticsearchRepositories(basePackages =
"org/springframework/data/elasticsearch/repositories")
static class Config {
@Bean
public ElasticsearchOperations elasticsearchTemplate() {
return new ElasticsearchTemplate(nodeBuilder().local(true).node().client());
}
}</programlisting>
<para>
The configuration above sets up an
<classname>Embedded Elasticsearch Server</classname>
which is used by the
<classname>ElasticsearchTemplate</classname>
. Spring Data Elasticsearch Repositories are activated using the
<interfacename>@EnableElasticsearchRepositories</interfacename>
annotation, which
essentially carries the same attributes as the XML
namespace does. If no
base package is configured, it will use the
one
the configuration class
resides in.
</para>
</example>
</section>
<section id="elasticsearch.cdi">
<title>Elasticsearch Repositories using CDI</title>
<para>The Spring Data Elasticsearch repositories can also be set up
using CDI
functionality.
</para>
<example>
<title>Spring Data Elasticsearch repositories using CDI
</title>
<programlisting language="java">class ElasticsearchTemplateProducer {
@Bean
public ElasticsearchOperations elasticsearchTemplate() {
return new ElasticsearchTemplate(nodeBuilder().local(true).node().client());
}
}
</programlisting>
<para>
The configuration above sets up an
<classname>Embedded Elasticsearch Server</classname>
which is used by the
<classname>ElasticsearchTemplate</classname>
. Spring Data Elasticsearch Repositories are activated using the
<interfacename>@EnableElasticsearchRepositories</interfacename>
annotation, which
essentially carries the same attributes as the XML
namespace does. If no
base package is configured, it will use the
one
the configuration class
resides in.
</para>
</example>
</section>
<section id="elasticsearch.cdi">
<title>Elasticsearch Repositories using CDI</title>
<para>The Spring Data Elasticsearch repositories can also be set up
using CDI
functionality.
</para>
<example>
<title>Spring Data Elasticsearch repositories using CDI
</title>
<programlisting language="java">class ElasticsearchTemplateProducer {
@Produces
@ApplicationScoped
public ElasticsearchOperations createElasticsearchTemplate() {
return new ElasticsearchTemplate(nodeBuilder().local(true).node().client());
}
}
@Produces
@ApplicationScoped
public ElasticsearchOperations createElasticsearchTemplate() {
return new ElasticsearchTemplate(nodeBuilder().local(true).node().client());
}
}
class ProductService {
class ProductService {
private ProductRepository repository;
private ProductRepository repository;
public Page&lt;Product&gt; findAvailableBookByName(String name, Pageable pageable) {
return repository.findByAvailableTrueAndNameStartingWith(name, pageable);
}
public Page&lt;Product&gt; findAvailableBookByName(String name, Pageable pageable) {
return repository.findByAvailableTrueAndNameStartingWith(name, pageable);
}
@Inject
public void setRepository(ProductRepository repository) {
this.repository = repository;
}
}</programlisting>
</example>
</section>
</section>
<section id="elasticsearch.query-methods">
<title>Query methods</title>
<section id="elasticsearch.query-methods.finders">
<title>Query lookup strategies</title>
<para>
The Elasticsearch module supports all basic query building
features such as String, Abstract and Criteria queries, or
having
the query derived from the
method name.
</para>
@Inject
public void setRepository(ProductRepository repository) {
this.repository = repository;
}
}
</programlisting>
</example>
</section>
</section>
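
The CDI example above injects and calls a `ProductRepository` without showing its declaration. A plausible sketch, assuming `Product` is an `@Document`-mapped entity with a `String` id (neither is shown in this diff), is:

```java
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;

// Hypothetical repository backing the ProductService shown above.
public interface ProductRepository extends ElasticsearchRepository<Product, String> {

	// Derived query: matches available products whose name starts with the given prefix.
	Page<Product> findByAvailableTrueAndNameStartingWith(String name, Pageable pageable);
}
```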
<section id="elasticsearch.query-methods">
<title>Query methods</title>
<section id="elasticsearch.query-methods.finders">
<title>Query lookup strategies</title>
<para>
The Elasticsearch module supports all basic query building
features such as String, Abstract and Criteria queries, or
having
the query derived from the
method name.
</para>
<simplesect>
<title>Declared queries</title>
<para>
Deriving the query from the method name is not always sufficient
and/or may result in unreadable method names. In this case one
might make use of the
<interfacename>@Query</interfacename>
annotation (see
<xref linkend="elasticsearch.query-methods.at-query" />
).
</para>
</simplesect>
</section>
<simplesect>
<title>Declared queries</title>
<para>
Deriving the query from the method name is not always sufficient
and/or may result in unreadable method names. In this case one
might make use of the
<interfacename>@Query</interfacename>
annotation (see
<xref linkend="elasticsearch.query-methods.at-query"/>
).
</para>
</simplesect>
</section>
<section id="elasticsearch.query-methods.criterions">
<title>Query creation</title>
<section id="elasticsearch.query-methods.criterions">
<title>Query creation</title>
<para>
Generally the query creation mechanism for Elasticsearch works as
described
in
<xref linkend="repositories.query-methods" />
. Here's a short example
of what an Elasticsearch query method
translates into:
<example>
<title>Query creation from method names</title>
<programlisting language="java">public interface BookRepository extends Repository&lt;Book, String&gt; {
List&lt;Book&gt; findByNameAndPrice(String name, Integer price);
}</programlisting>
<para>
The method name above will be translated into the following
Elasticsearch json query
</para>
<programlisting>
<para>
Generally the query creation mechanism for Elasticsearch works as
described
in
<xref linkend="repositories.query-methods"/>
. Here's a short example
of what an Elasticsearch query method
translates into:
<example>
<title>Query creation from method names</title>
<programlisting language="java">public interface BookRepository extends Repository&lt;Book, String&gt;
{
List&lt;Book&gt; findByNameAndPrice(String name, Integer price);
}
</programlisting>
<para>
The method name above will be translated into the following
Elasticsearch json query
</para>
<programlisting>
{ "bool" :
{ "must" :
[
{ "field" : {"name" : "?"} },
{ "field" : {"price" : "?"} }
] } }</programlisting>
</example>
</para>
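
From the caller's side the derived query above is just a method invocation; a hedged usage sketch (the repository instance and the sample values are assumptions, not part of the documentation) would be:

```java
// 'repository' is an injected BookRepository as declared in the listing above
List<Book> books = repository.findByNameAndPrice("Spring Data", 10);
```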
<para>
A list of supported keywords for Elasticsearch is shown below.
<table>
<title>Supported keywords inside method names</title>
<tgroup cols="3">
<colspec colwidth="1*" />
<colspec colwidth="2*" />
<colspec colwidth="3*" />
<thead>
<row>
<entry>Keyword</entry>
<entry>Sample</entry>
<entry>Elasticsearch Query String</entry>
</row>
</thead>
<tbody>
<row>
<entry>
<code>And</code>
</entry>
<entry>
<code>findByNameAndPrice</code>
</entry>
<entry>
<code>{"bool" : {"must" : [ {"field" : {"name" : "?"}},
{"field" : {"price" : "?"}} ]}}</code>
</entry>
</row>
<row>
<entry>
<code>Or</code>
</entry>
<entry>
<code>findByNameOrPrice</code>
</entry>
<entry>
<code>{"bool" : {"should" : [ {"field" : {"name" : "?"}},
{"field" : {"price" : "?"}} ]}}</code>
</entry>
</row>
<row>
<entry>
<code>Is</code>
</entry>
<entry>
<code>findByName</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"field" : {"name" : "?"}}}}</code>
</entry>
</row>
<row>
<entry>
<code>Not</code>
</entry>
<entry>
<code>findByNameNot</code>
</entry>
<entry>
<code>{"bool" : {"must_not" : {"field" : {"name" : "?"}}}}
</code>
</entry>
</row>
<row>
<entry>
<code>Between</code>
</entry>
<entry>
<code>findByPriceBetween</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"range" : {"price" : {"from" :
?,"to" : ?,"include_lower" : true,"include_upper" : true}}}}}
</code>
</entry>
</row>
<row>
<entry>
<code>LessThanEqual</code>
</entry>
<entry>
<code>findByPriceLessThan</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"range" : {"price" : {"from" :
null,"to" : ?,"include_lower" : true,"include_upper" :
true}}}}}</code>
</entry>
</row>
<row>
<entry>
<code>GreaterThanEqual</code>
</entry>
<entry>
<code>findByPriceGreaterThan</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"range" : {"price" : {"from" :
?,"to" : null,"include_lower" : true,"include_upper" :
true}}}}}</code>
</entry>
</row>
<row>
<entry>
<code>Before</code>
</entry>
<entry>
<code>findByPriceBefore</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"range" : {"price" : {"from" :
null,"to" : ?,"include_lower" : true,"include_upper" :
true}}}}}</code>
</entry>
</row>
<row>
<entry>
<code>After</code>
</entry>
<entry>
<code>findByPriceAfter</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"range" : {"price" : {"from" :
?,"to" : null,"include_lower" : true,"include_upper" :
true}}}}}</code>
</entry>
</row>
<row>
<entry>
<code>Like</code>
</entry>
<entry>
<code>findByNameLike</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"field" : {"name" : {"query" :
"?*","analyze_wildcard" : true}}}}}</code>
</entry>
</row>
<row>
<entry>
<code>StartingWith</code>
</entry>
<entry>
<code>findByNameStartingWith</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"field" : {"name" : {"query" :
"?*","analyze_wildcard" : true}}}}}</code>
</entry>
</row>
<row>
<entry>
<code>EndingWith</code>
</entry>
<entry>
<code>findByNameEndingWith</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"field" : {"name" : {"query" :
"*?","analyze_wildcard" : true}}}}}</code>
</entry>
</row>
<row>
<entry>
<code>Contains/Containing</code>
</entry>
<entry>
<code>findByNameContaining</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"field" : {"name" : {"query" :
"*?*","analyze_wildcard" : true}}}}}</code>
</entry>
</row>
<row>
<entry>
<code>In</code>
</entry>
<entry>
<code>findByNameIn(Collection&lt;String&gt;names)</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"bool" : {"should" : [ {"field" :
{"name" : "?"}}, {"field" : {"name" : "?"}} ]}}}}</code>
</entry>
</row>
<row>
<entry>
<code>NotIn</code>
</entry>
<entry>
<code>findByNameNotIn(Collection&lt;String&gt;names)</code>
</entry>
<entry>
<code>{"bool" : {"must_not" : {"bool" : {"should" : {"field" :
{"name" : "?"}}}}}}</code>
</entry>
</row>
<row>
<entry>
<code>Near</code>
</entry>
<entry>
<code>findByStoreNear</code>
</entry>
<entry>
<code>Not Supported Yet !</code>
</entry>
</row>
<row>
<entry>
<code>True</code>
</entry>
<entry>
<code>findByAvailableTrue</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"field" : {"available" : true}}}}
</code>
</entry>
</row>
<row>
<entry>
<code>False</code>
</entry>
<entry>
<code>findByAvailableFalse</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"field" : {"available" : false}}}}
</code>
</entry>
</row>
<row>
<entry>
<code>OrderBy</code>
</entry>
<entry>
<code>findByAvailableTrueOrderByNameDesc</code>
</entry>
<entry>
<code>{"sort" : [{ "name" : {"order" : "desc"} }],"bool" :
{"must" : {"field" : {"available" : true}}}}</code>
</entry>
</row>
</tbody>
</tgroup>
</table>
</para>
</section>
<section id="elasticsearch.query-methods.at-query">
<title>Using @Query Annotation</title>
<example>
<title>
Declare query at the method using the
<interfacename>@Query</interfacename>
annotation.
</title>
] } }
</programlisting>
</example>
</para>
<para>
A list of supported keywords for Elasticsearch is shown below.
<table>
<title>Supported keywords inside method names</title>
<tgroup cols="3">
<colspec colwidth="1*"/>
<colspec colwidth="2*"/>
<colspec colwidth="3*"/>
<thead>
<row>
<entry>Keyword</entry>
<entry>Sample</entry>
<entry>Elasticsearch Query String</entry>
</row>
</thead>
<tbody>
<row>
<entry>
<code>And</code>
</entry>
<entry>
<code>findByNameAndPrice</code>
</entry>
<entry>
<code>{"bool" : {"must" : [ {"field" : {"name" : "?"}},
{"field" : {"price" : "?"}} ]}}
</code>
</entry>
</row>
<row>
<entry>
<code>Or</code>
</entry>
<entry>
<code>findByNameOrPrice</code>
</entry>
<entry>
<code>{"bool" : {"should" : [ {"field" : {"name" : "?"}},
{"field" : {"price" : "?"}} ]}}
</code>
</entry>
</row>
<row>
<entry>
<code>Is</code>
</entry>
<entry>
<code>findByName</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"field" : {"name" : "?"}}}}</code>
</entry>
</row>
<row>
<entry>
<code>Not</code>
</entry>
<entry>
<code>findByNameNot</code>
</entry>
<entry>
<code>{"bool" : {"must_not" : {"field" : {"name" : "?"}}}}
</code>
</entry>
</row>
<row>
<entry>
<code>Between</code>
</entry>
<entry>
<code>findByPriceBetween</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"range" : {"price" : {"from" :
?,"to" : ?,"include_lower" : true,"include_upper" : true}}}}}
</code>
</entry>
</row>
<row>
<entry>
<code>LessThanEqual</code>
</entry>
<entry>
<code>findByPriceLessThan</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"range" : {"price" : {"from" :
null,"to" : ?,"include_lower" : true,"include_upper" :
true}}}}}
</code>
</entry>
</row>
<row>
<entry>
<code>GreaterThanEqual</code>
</entry>
<entry>
<code>findByPriceGreaterThan</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"range" : {"price" : {"from" :
?,"to" : null,"include_lower" : true,"include_upper" :
true}}}}}
</code>
</entry>
</row>
<row>
<entry>
<code>Before</code>
</entry>
<entry>
<code>findByPriceBefore</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"range" : {"price" : {"from" :
null,"to" : ?,"include_lower" : true,"include_upper" :
true}}}}}
</code>
</entry>
</row>
<row>
<entry>
<code>After</code>
</entry>
<entry>
<code>findByPriceAfter</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"range" : {"price" : {"from" :
?,"to" : null,"include_lower" : true,"include_upper" :
true}}}}}
</code>
</entry>
</row>
<row>
<entry>
<code>Like</code>
</entry>
<entry>
<code>findByNameLike</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"field" : {"name" : {"query" :
"?*","analyze_wildcard" : true}}}}}
</code>
</entry>
</row>
<row>
<entry>
<code>StartingWith</code>
</entry>
<entry>
<code>findByNameStartingWith</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"field" : {"name" : {"query" :
"?*","analyze_wildcard" : true}}}}}
</code>
</entry>
</row>
<row>
<entry>
<code>EndingWith</code>
</entry>
<entry>
<code>findByNameEndingWith</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"field" : {"name" : {"query" :
"*?","analyze_wildcard" : true}}}}}
</code>
</entry>
</row>
<row>
<entry>
<code>Contains/Containing</code>
</entry>
<entry>
<code>findByNameContaining</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"field" : {"name" : {"query" :
"*?*","analyze_wildcard" : true}}}}}
</code>
</entry>
</row>
<row>
<entry>
<code>In</code>
</entry>
<entry>
<code>findByNameIn(Collection&lt;String&gt; names)</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"bool" : {"should" : [ {"field" :
{"name" : "?"}}, {"field" : {"name" : "?"}} ]}}}}
</code>
</entry>
</row>
<row>
<entry>
<code>NotIn</code>
</entry>
<entry>
<code>findByNameNotIn(Collection&lt;String&gt; names)</code>
</entry>
<entry>
<code>{"bool" : {"must_not" : {"bool" : {"should" : {"field" :
{"name" : "?"}}}}}}
</code>
</entry>
</row>
<row>
<entry>
<code>Near</code>
</entry>
<entry>
<code>findByStoreNear</code>
</entry>
<entry>
<code>Not supported yet</code>
</entry>
</row>
<row>
<entry>
<code>True</code>
</entry>
<entry>
<code>findByAvailableTrue</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"field" : {"available" : true}}}}
</code>
</entry>
</row>
<row>
<entry>
<code>False</code>
</entry>
<entry>
<code>findByAvailableFalse</code>
</entry>
<entry>
<code>{"bool" : {"must" : {"field" : {"available" : false}}}}
</code>
</entry>
</row>
<row>
<entry>
<code>OrderBy</code>
</entry>
<entry>
<code>findByAvailableTrueOrderByNameDesc</code>
</entry>
<entry>
<code>{"sort" : [{ "name" : {"order" : "desc"} }],"bool" :
{"must" : {"field" : {"available" : true}}}}
</code>
</entry>
</row>
</tbody>
</tgroup>
</table>
</para>
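<para>
For illustration only (this sketch is not part of the reference code), a repository
interface combining several of the keywords above could look as follows. The
<code>Product</code> entity and its <code>name</code>, <code>price</code> and
<code>available</code> properties are assumptions chosen to match the fields used in the table.
</para>
<example>
<title>Derived query methods using the keywords above</title>
<programlisting language="java">public interface ProductRepository extends ElasticsearchRepository&lt;Product, String&gt; {

    // "And" - both conditions must match
    List&lt;Product&gt; findByNameAndPrice(String name, Double price);

    // "Between" - becomes a range query on the price field
    List&lt;Product&gt; findByPriceBetween(Double from, Double to);

    // "True" + "OrderBy" - filtered and sorted result
    List&lt;Product&gt; findByAvailableTrueOrderByNameDesc();
}</programlisting>
</example>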
</section>
<section id="elasticsearch.query-methods.at-query">
<title>Using @Query Annotation</title>
<example>
<title>
Declare query at the method using the
<interfacename>@Query</interfacename>
annotation.
</title>
<programlisting language="java">public interface BookRepository extends ElasticsearchRepository&lt;Book, String&gt; {
@Query("{"bool" : {"must" : {"field" : {"name" : "?0"}}}}")
Page&lt;Book&gt; findByName(String name,Pageable pageable);
}</programlisting>
</example>
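<para>
Such a method is then invoked like any other repository method; the
<code>?0</code> placeholder is replaced by the first method argument. A minimal
usage sketch (the repository instance is assumed to be injected):
</para>
<example>
<programlisting language="java">Page&lt;Book&gt; books = bookRepository.findByName("Spring Data", new PageRequest(0, 10));</programlisting>
</example>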
</section>
<programlisting language="java">public interface BookRepository extends ElasticsearchRepository&lt;Book,
String&gt; {
@Query("{"bool" : {"must" : {"field" : {"name" : "?0"}}}}")
Page&lt;Book&gt; findByName(String name,Pageable pageable);
}
</programlisting>
</example>
</section>
</section>
</section>
</chapter>

View File

@ -1,85 +1,88 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.4//EN"
"http://www.oasis-open.org/docbook/xml/4.4/docbookx.dtd">
"http://www.oasis-open.org/docbook/xml/4.4/docbookx.dtd">
<chapter id="elasticsearch.misc">
<title>Miscellaneous Elasticsearch Operation Support</title>
<abstract>
<para>
This chapter covers additional support for Elasticsearch operations
that cannot be accessed directly via the repository interface.
It is recommended to add those operations as a custom implementation as
described in
<xref linkend="repositories.custom-implementations" />.
</para>
</abstract>
<section id="elasticsearch.misc.filter">
<title>Filter Builder</title>
<para>
Using a filter builder improves query speed, since filters restrict the result set without being taken into account when computing relevance scores.
</para>
<example>
<programlisting language="java">
private ElasticsearchTemplate elasticsearchTemplate;
SearchQuery searchQuery = new NativeSearchQueryBuilder()
.withQuery(matchAllQuery())
.withFilter(boolFilter().must(termFilter("id", documentId)))
.build();
Page&lt;SampleEntity&gt; sampleEntities = elasticsearchTemplate.queryForPage(searchQuery,SampleEntity.class);
</programlisting>
</example>
</section>
<section id="elasticsearch.scan.and.scroll">
<title>Using Scan and Scroll for Big Result Sets</title>
<para>
Elasticsearch provides a scan and scroll feature for retrieving large result sets in chunks.
<interfacename>ElasticsearchTemplate</interfacename>
exposes corresponding scan and scroll methods, which can be used as shown below.
</para>
<example>
<title>
Using Scan and Scroll
</title>
<programlisting language="java">
SearchQuery searchQuery = new NativeSearchQueryBuilder()
.withQuery(matchAllQuery())
.withIndices("test-index")
.withTypes("test-type")
.withPageable(new PageRequest(0,1))
.build();
String scrollId = elasticsearchTemplate.scan(searchQuery,1000,false);
List&lt;SampleEntity&gt; sampleEntities = new ArrayList&lt;SampleEntity&gt;();
boolean hasRecords = true;
while (hasRecords){
Page&lt;SampleEntity&gt; page = elasticsearchTemplate.scroll(scrollId, 5000L , new ResultsMapper&lt;SampleEntity&gt;() {
@Override
public Page&lt;SampleEntity&gt; mapResults(SearchResponse response) {
List&lt;SampleEntity&gt; chunk = new ArrayList&lt;SampleEntity&gt;();
for(SearchHit searchHit : response.getHits()){
if(response.getHits().getHits().length &lt;= 0) {
return null;
}
SampleEntity user = new SampleEntity();
user.setId(searchHit.getId());
user.setMessage((String)searchHit.getSource().get("message"));
chunk.add(user);
}
return new PageImpl&lt;SampleEntity&gt;(chunk);
}
});
if(page != null) {
sampleEntities.addAll(page.getContent());
hasRecords = page.hasNextPage();
}
else{
hasRecords = false;
}
}</programlisting>
</example>
</section>
<title>Miscellaneous Elasticsearch Operation Support</title>
<abstract>
<para>
This chapter covers additional support for Elasticsearch operations
that cannot be accessed directly via the repository interface.
It is recommended to add those operations as a custom implementation as
described in
<xref linkend="repositories.custom-implementations"/>.
</para>
</abstract>
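<para>
As a brief illustration of that recommendation (a sketch only; the type and method names
are not part of this project), a custom repository fragment backed by
<interfacename>ElasticsearchTemplate</interfacename>
could be structured like this, reusing the same building blocks that appear in the
Filter Builder example below:
</para>
<example>
<programlisting language="java">public interface SampleEntityRepositoryCustom {

    // an operation that cannot be expressed as a derived query method
    Page&lt;SampleEntity&gt; findByIdUsingFilter(String documentId);
}

public class SampleEntityRepositoryImpl implements SampleEntityRepositoryCustom {

    @Autowired
    private ElasticsearchTemplate elasticsearchTemplate;

    @Override
    public Page&lt;SampleEntity&gt; findByIdUsingFilter(String documentId) {
        SearchQuery searchQuery = new NativeSearchQueryBuilder()
                .withQuery(matchAllQuery())
                .withFilter(boolFilter().must(termFilter("id", documentId)))
                .build();
        return elasticsearchTemplate.queryForPage(searchQuery, SampleEntity.class);
    }
}</programlisting>
</example>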
<section id="elasticsearch.misc.filter">
<title>Filter Builder</title>
<para>
Using a filter builder improves query speed, since filters restrict the result set without being taken into account when computing relevance scores.
</para>
<example>
<programlisting language="java">
private ElasticsearchTemplate elasticsearchTemplate;
SearchQuery searchQuery = new NativeSearchQueryBuilder()
.withQuery(matchAllQuery())
.withFilter(boolFilter().must(termFilter("id", documentId)))
.build();
Page&lt;SampleEntity&gt; sampleEntities =
elasticsearchTemplate.queryForPage(searchQuery,SampleEntity.class);
</programlisting>
</example>
</section>
<section id="elasticsearch.scan.and.scroll">
<title>Using Scan and Scroll for Big Result Sets</title>
<para>
Elasticsearch provides a scan and scroll feature for retrieving large result sets in chunks.
<interfacename>ElasticsearchTemplate</interfacename>
exposes corresponding scan and scroll methods, which can be used as shown below.
</para>
<example>
<title>
Using Scan and Scroll
</title>
<programlisting language="java">
SearchQuery searchQuery = new NativeSearchQueryBuilder()
.withQuery(matchAllQuery())
.withIndices("test-index")
.withTypes("test-type")
.withPageable(new PageRequest(0,1))
.build();
String scrollId = elasticsearchTemplate.scan(searchQuery,1000,false);
List&lt;SampleEntity&gt; sampleEntities = new ArrayList&lt;SampleEntity&gt;();
boolean hasRecords = true;
while (hasRecords){
Page&lt;SampleEntity&gt; page = elasticsearchTemplate.scroll(scrollId, 5000L , new ResultsMapper&lt;SampleEntity&gt;()
{
@Override
public Page&lt;SampleEntity&gt; mapResults(SearchResponse response) {
List&lt;SampleEntity&gt; chunk = new ArrayList&lt;SampleEntity&gt;();
for(SearchHit searchHit : response.getHits()){
if(response.getHits().getHits().length &lt;= 0) {
return null;
}
SampleEntity user = new SampleEntity();
user.setId(searchHit.getId());
user.setMessage((String)searchHit.getSource().get("message"));
chunk.add(user);
}
return new PageImpl&lt;SampleEntity&gt;(chunk);
}
});
if(page != null) {
sampleEntities.addAll(page.getContent());
hasRecords = page.hasNextPage();
}
else{
hasRecords = false;
}
}
</programlisting>
</example>
</section>
</chapter>

File diff suppressed because it is too large

View File

@ -19,7 +19,7 @@ import java.util.Map;
/**
* ElasticsearchException
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/

View File

@ -17,16 +17,15 @@ package org.springframework.data.elasticsearch.annotations;
/**
* @author Jakub Vavrik
*
* Values based on reference doc - http://www.elasticsearch.org/guide/reference/mapping/date-format/
* Values based on reference doc - http://www.elasticsearch.org/guide/reference/mapping/date-format/
*/
public enum DateFormat {
none, custom, basic_date, basic_date_time, basic_date_time_no_millis, basic_ordinal_date, basic_ordinal_date_time,
basic_ordinal_date_time_no_millis, basic_time, basic_time_no_millis, basic_t_time, basic_t_time_no_millis,
basic_week_date, basic_week_date_time, basic_week_date_time_no_millis, date, date_hour, date_hour_minute,
date_hour_minute_second, date_hour_minute_second_fraction, date_hour_minute_second_millis, date_optional_time,
date_time, date_time_no_millis, hour, hour_minute, hour_minute_second, hour_minute_second_fraction,
hour_minute_second_millis, ordinal_date, ordinal_date_time, ordinal_date_time_no_millis, time, time_no_millis,
t_time, t_time_no_millis, week_date, week_date_time, weekDateTimeNoMillis, week_year, weekyearWeek,
weekyearWeekDay, year, year_month, year_month_day
none, custom, basic_date, basic_date_time, basic_date_time_no_millis, basic_ordinal_date, basic_ordinal_date_time,
basic_ordinal_date_time_no_millis, basic_time, basic_time_no_millis, basic_t_time, basic_t_time_no_millis,
basic_week_date, basic_week_date_time, basic_week_date_time_no_millis, date, date_hour, date_hour_minute,
date_hour_minute_second, date_hour_minute_second_fraction, date_hour_minute_second_millis, date_optional_time,
date_time, date_time_no_millis, hour, hour_minute, hour_minute_second, hour_minute_second_fraction,
hour_minute_second_millis, ordinal_date, ordinal_date_time, ordinal_date_time_no_millis, time, time_no_millis,
t_time, t_time_no_millis, week_date, week_date_time, weekDateTimeNoMillis, week_year, weekyearWeek,
weekyearWeekDay, year, year_month, year_month_day
}

View File

@ -15,13 +15,13 @@
*/
package org.springframework.data.elasticsearch.annotations;
import org.springframework.data.annotation.Persistent;
import java.lang.annotation.*;
import org.springframework.data.annotation.Persistent;
/**
* Document
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -29,7 +29,7 @@ import java.lang.annotation.*;
@Persistent
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.TYPE })
@Target({ElementType.TYPE})
public @interface Document {
String indexName();

View File

@ -29,20 +29,19 @@ import java.lang.annotation.*;
@Documented
public @interface Field {
FieldType type() default FieldType.Auto;
FieldType type() default FieldType.Auto;
FieldIndex index() default FieldIndex.analyzed;
FieldIndex index() default FieldIndex.analyzed;
DateFormat format() default DateFormat.none;
DateFormat format() default DateFormat.none;
String pattern() default "";
String pattern() default "";
boolean store() default false;
boolean store() default false;
String searchAnalyzer() default "";
String searchAnalyzer() default "";
String indexAnalyzer() default "";
String [] ignoreFields() default {};
String indexAnalyzer() default "";
String[] ignoreFields() default {};
}

View File

@ -20,5 +20,5 @@ package org.springframework.data.elasticsearch.annotations;
* @author Mohsin Husen
*/
public enum FieldIndex {
not_analyzed, analyzed
not_analyzed, analyzed
}

View File

@ -21,5 +21,5 @@ package org.springframework.data.elasticsearch.annotations;
* @author Artur Konczak
*/
public enum FieldType {
String, Integer, Long, Date, Float, Double, Boolean, Object, Auto, Nested
String, Integer, Long, Date, Float, Double, Boolean, Object, Auto, Nested
}

View File

@ -28,7 +28,7 @@ import java.lang.annotation.*;
@Documented
public @interface MultiField {
public Field mainField();
public Field mainField();
public NestedField[] otherFields() default {};
public NestedField[] otherFields() default {};
}

View File

@ -27,15 +27,15 @@ import java.lang.annotation.Target;
@Target(ElementType.FIELD)
public @interface NestedField {
String dotSuffix();
String dotSuffix();
FieldType type();
FieldType type();
FieldIndex index() default FieldIndex.analyzed;
FieldIndex index() default FieldIndex.analyzed;
boolean store() default false;
boolean store() default false;
String searchAnalyzer() default "";
String searchAnalyzer() default "";
String indexAnalyzer() default "";
String indexAnalyzer() default "";
}

View File

@ -21,7 +21,7 @@ import org.springframework.data.annotation.Persistent;
/**
* Parent
*
*
* @author Philipp Jardas
*/
@ -30,5 +30,6 @@ import org.springframework.data.annotation.Persistent;
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface Parent {
String type();
}

View File

@ -19,7 +19,7 @@ import java.lang.annotation.*;
/**
* Query
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -31,16 +31,15 @@ public @interface Query {
/**
* Elasticsearch query to be used when executing query. May contain placeholders eg. ?0
*
*
* @return
*/
String value() default "";
/**
* Named Query Named looked up by repository.
*
*
* @return
*/
String name() default "";
}

View File

@ -15,6 +15,8 @@
*/
package org.springframework.data.elasticsearch.client;
import static org.elasticsearch.node.NodeBuilder.*;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.settings.ImmutableSettings;
@ -24,11 +26,9 @@ import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
import static org.elasticsearch.node.NodeBuilder.nodeBuilder;
/**
* NodeClientFactoryBean
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/

View File

@ -15,6 +15,11 @@
*/
package org.springframework.data.elasticsearch.client;
import static org.apache.commons.lang.StringUtils.*;
import static org.elasticsearch.common.settings.ImmutableSettings.*;
import java.util.Properties;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
@ -25,11 +30,6 @@ import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.util.Assert;
import java.util.Properties;
import static org.apache.commons.lang.StringUtils.*;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
/**
* TransportClientFactoryBean
*
@ -40,114 +40,114 @@ import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilde
public class TransportClientFactoryBean implements FactoryBean<TransportClient>, InitializingBean, DisposableBean {
private static final Logger logger = LoggerFactory.getLogger(TransportClientFactoryBean.class);
private String clusterNodes;
private String clusterName;
private Boolean clientTransportSniff;
private Boolean clientIgnoreClusterName = Boolean.FALSE;
private String clientPingTimeout = "5s";
private String clientNodesSamplerInterval = "5s";
private TransportClient client;
private Properties properties;
static final String COLON = ":";
static final String COMMA = ",";
private static final Logger logger = LoggerFactory.getLogger(TransportClientFactoryBean.class);
private String clusterNodes;
private String clusterName;
private Boolean clientTransportSniff;
private Boolean clientIgnoreClusterName = Boolean.FALSE;
private String clientPingTimeout = "5s";
private String clientNodesSamplerInterval = "5s";
private TransportClient client;
private Properties properties;
static final String COLON = ":";
static final String COMMA = ",";
@Override
public void destroy() throws Exception {
try {
logger.info("Closing elasticSearch client");
if (client != null) {
client.close();
}
} catch (final Exception e) {
logger.error("Error closing ElasticSearch client: ", e);
}
}
@Override
public void destroy() throws Exception {
try {
logger.info("Closing elasticSearch client");
if (client != null) {
client.close();
}
} catch (final Exception e) {
logger.error("Error closing ElasticSearch client: ", e);
}
}
@Override
public TransportClient getObject() throws Exception {
return client;
}
@Override
public TransportClient getObject() throws Exception {
return client;
}
@Override
public Class<TransportClient> getObjectType() {
return TransportClient.class;
}
@Override
public Class<TransportClient> getObjectType() {
return TransportClient.class;
}
@Override
public boolean isSingleton() {
return false;
}
@Override
public boolean isSingleton() {
return false;
}
@Override
public void afterPropertiesSet() throws Exception {
buildClient();
}
@Override
public void afterPropertiesSet() throws Exception {
buildClient();
}
protected void buildClient() throws Exception {
client = new TransportClient(settings());
Assert.hasText(clusterNodes, "[Assertion failed] clusterNodes settings missing.");
for (String clusterNode : split(clusterNodes,COMMA)) {
String hostName = substringBefore(clusterNode, COLON);
String port = substringAfter(clusterNode, COLON);
Assert.hasText(hostName, "[Assertion failed] missing host name in 'clusterNodes'");
Assert.hasText(port, "[Assertion failed] missing port in 'clusterNodes'");
logger.info("adding transport node : " + clusterNode);
client.addTransportAddress(new InetSocketTransportAddress(hostName, Integer.valueOf(port)));
}
client.connectedNodes();
}
protected void buildClient() throws Exception {
client = new TransportClient(settings());
Assert.hasText(clusterNodes, "[Assertion failed] clusterNodes settings missing.");
for (String clusterNode : split(clusterNodes, COMMA)) {
String hostName = substringBefore(clusterNode, COLON);
String port = substringAfter(clusterNode, COLON);
Assert.hasText(hostName, "[Assertion failed] missing host name in 'clusterNodes'");
Assert.hasText(port, "[Assertion failed] missing port in 'clusterNodes'");
logger.info("adding transport node : " + clusterNode);
client.addTransportAddress(new InetSocketTransportAddress(hostName, Integer.valueOf(port)));
}
client.connectedNodes();
}
private Settings settings() {
if (properties != null) {
return settingsBuilder().put(properties).build();
}
return settingsBuilder()
.put("cluster.name", clusterName)
.put("client.transport.sniff", clientTransportSniff)
.put("client.transport.ignore_cluster_name", clientIgnoreClusterName)
.put("client.transport.ping_timeout", clientPingTimeout)
.put("client.transport.nodes_sampler_interval", clientNodesSamplerInterval)
.build();
}
private Settings settings() {
if (properties != null) {
return settingsBuilder().put(properties).build();
}
return settingsBuilder()
.put("cluster.name", clusterName)
.put("client.transport.sniff", clientTransportSniff)
.put("client.transport.ignore_cluster_name", clientIgnoreClusterName)
.put("client.transport.ping_timeout", clientPingTimeout)
.put("client.transport.nodes_sampler_interval", clientNodesSamplerInterval)
.build();
}
public void setClusterNodes(String clusterNodes) {
this.clusterNodes = clusterNodes;
}
public void setClusterNodes(String clusterNodes) {
this.clusterNodes = clusterNodes;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public void setClientTransportSniff(Boolean clientTransportSniff) {
this.clientTransportSniff = clientTransportSniff;
}
public void setClientTransportSniff(Boolean clientTransportSniff) {
this.clientTransportSniff = clientTransportSniff;
}
public String getClientNodesSamplerInterval() {
return clientNodesSamplerInterval;
}
public String getClientNodesSamplerInterval() {
return clientNodesSamplerInterval;
}
public void setClientNodesSamplerInterval(String clientNodesSamplerInterval) {
this.clientNodesSamplerInterval = clientNodesSamplerInterval;
}
public void setClientNodesSamplerInterval(String clientNodesSamplerInterval) {
this.clientNodesSamplerInterval = clientNodesSamplerInterval;
}
public String getClientPingTimeout() {
return clientPingTimeout;
}
public String getClientPingTimeout() {
return clientPingTimeout;
}
public void setClientPingTimeout(String clientPingTimeout) {
this.clientPingTimeout = clientPingTimeout;
}
public void setClientPingTimeout(String clientPingTimeout) {
this.clientPingTimeout = clientPingTimeout;
}
public Boolean getClientIgnoreClusterName() {
return clientIgnoreClusterName;
}
public Boolean getClientIgnoreClusterName() {
return clientIgnoreClusterName;
}
public void setClientIgnoreClusterName(Boolean clientIgnoreClusterName) {
this.clientIgnoreClusterName = clientIgnoreClusterName;
}
public void setClientIgnoreClusterName(Boolean clientIgnoreClusterName) {
this.clientIgnoreClusterName = clientIgnoreClusterName;
}
public void setProperties(Properties properties) {
this.properties = properties;
}
public void setProperties(Properties properties) {
this.properties = properties;
}
}

View File

@ -22,7 +22,7 @@ import org.springframework.data.repository.config.RepositoryConfigurationExtensi
/**
* ElasticsearchNamespaceHandler
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/

View File

@ -25,7 +25,7 @@ import org.w3c.dom.Element;
/**
* NodeClientBeanDefinitionParser
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -46,7 +46,7 @@ public class NodeClientBeanDefinitionParser extends AbstractBeanDefinitionParser
}
private AbstractBeanDefinition getSourcedBeanDefinition(BeanDefinitionBuilder builder, Element source,
ParserContext context) {
ParserContext context) {
AbstractBeanDefinition definition = builder.getBeanDefinition();
definition.setSource(context.extractSource(source));
return definition;

View File

@ -22,11 +22,9 @@ import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.data.elasticsearch.client.TransportClientFactoryBean;
import org.w3c.dom.Element;
import static org.apache.commons.lang.StringUtils.split;
/**
* TransportClientBeanDefinitionParser
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -42,15 +40,15 @@ public class TransportClientBeanDefinitionParser extends AbstractBeanDefinitionP
private void setConfigurations(Element element, BeanDefinitionBuilder builder) {
builder.addPropertyValue("clusterNodes", element.getAttribute("cluster-nodes"));
builder.addPropertyValue("clusterName", element.getAttribute("cluster-name"));
builder.addPropertyValue("clientTransportSniff", Boolean.valueOf(element.getAttribute("client-transport-sniff")));
builder.addPropertyValue("clientIgnoreClusterName", Boolean.valueOf(element.getAttribute("client-transport-ignore-cluster-name")));
builder.addPropertyValue("clientPingTimeout", element.getAttribute("client-transport-ping-timeout"));
builder.addPropertyValue("clientNodesSamplerInterval", element.getAttribute("client-transport-nodes-sampler-interval"));
builder.addPropertyValue("clusterName", element.getAttribute("cluster-name"));
builder.addPropertyValue("clientTransportSniff", Boolean.valueOf(element.getAttribute("client-transport-sniff")));
builder.addPropertyValue("clientIgnoreClusterName", Boolean.valueOf(element.getAttribute("client-transport-ignore-cluster-name")));
builder.addPropertyValue("clientPingTimeout", element.getAttribute("client-transport-ping-timeout"));
builder.addPropertyValue("clientNodesSamplerInterval", element.getAttribute("client-transport-nodes-sampler-interval"));
}
private AbstractBeanDefinition getSourcedBeanDefinition(BeanDefinitionBuilder builder, Element source,
ParserContext context) {
ParserContext context) {
AbstractBeanDefinition definition = builder.getBeanDefinition();
definition.setSource(context.extractSource(source));
return definition;

View File

@ -15,36 +15,36 @@
*/
package org.springframework.data.elasticsearch.core;
import org.springframework.data.elasticsearch.ElasticsearchException;
import static org.apache.commons.lang.StringUtils.*;
import java.io.IOException;
import static org.apache.commons.lang.StringUtils.isBlank;
import org.springframework.data.elasticsearch.ElasticsearchException;
/**
* @author Artur Konczak
*/
public abstract class AbstractResultMapper implements ResultsMapper {
private EntityMapper entityMapper;
private EntityMapper entityMapper;
public AbstractResultMapper(EntityMapper entityMapper) {
this.entityMapper = entityMapper;
}
public AbstractResultMapper(EntityMapper entityMapper) {
this.entityMapper = entityMapper;
}
public <T> T mapEntity(String source, Class<T> clazz) {
if (isBlank(source)) {
return null;
}
try {
return entityMapper.mapToObject(source, clazz);
} catch (IOException e) {
throw new ElasticsearchException("failed to map source [ " + source + "] to class " + clazz.getSimpleName(), e);
}
}
public <T> T mapEntity(String source, Class<T> clazz) {
if (isBlank(source)) {
return null;
}
try {
return entityMapper.mapToObject(source, clazz);
} catch (IOException e) {
throw new ElasticsearchException("failed to map source [ " + source + "] to class " + clazz.getSimpleName(), e);
}
}
@Override
public EntityMapper getEntityMapper() {
return this.entityMapper;
}
@Override
public EntityMapper getEntityMapper() {
return this.entityMapper;
}
}

View File

@ -15,19 +15,21 @@
*/
package org.springframework.data.elasticsearch.core;
import org.elasticsearch.index.query.*;
import org.springframework.data.elasticsearch.core.geo.GeoBox;
import org.springframework.data.elasticsearch.core.geo.GeoPoint;
import org.springframework.data.elasticsearch.core.query.Criteria;
import org.springframework.util.Assert;
import static org.elasticsearch.index.query.FilterBuilders.*;
import static org.springframework.data.elasticsearch.core.query.Criteria.*;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import static org.elasticsearch.index.query.FilterBuilders.*;
import static org.springframework.data.elasticsearch.core.query.Criteria.OperationKey;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.index.query.GeoBoundingBoxFilterBuilder;
import org.elasticsearch.index.query.GeoDistanceFilterBuilder;
import org.springframework.data.elasticsearch.core.geo.GeoBox;
import org.springframework.data.elasticsearch.core.geo.GeoPoint;
import org.springframework.data.elasticsearch.core.query.Criteria;
import org.springframework.util.Assert;
/**
* CriteriaFilterProcessor
@ -37,160 +39,158 @@ import static org.springframework.data.elasticsearch.core.query.Criteria.Operati
class CriteriaFilterProcessor {
FilterBuilder createFilterFromCriteria(Criteria criteria) {
List<FilterBuilder> fbList = new LinkedList<FilterBuilder>();
FilterBuilder filter = null;
FilterBuilder createFilterFromCriteria(Criteria criteria) {
List<FilterBuilder> fbList = new LinkedList<FilterBuilder>();
FilterBuilder filter = null;
ListIterator<Criteria> chainIterator = criteria.getCriteriaChain().listIterator();
ListIterator<Criteria> chainIterator = criteria.getCriteriaChain().listIterator();
while (chainIterator.hasNext()) {
FilterBuilder fb = null;
Criteria chainedCriteria = chainIterator.next();
if (chainedCriteria.isOr()) {
fb = orFilter(createFilterFragmentForCriteria(chainedCriteria).toArray(new FilterBuilder[]{}));
fbList.add(fb);
} else if (chainedCriteria.isNegating()) {
List<FilterBuilder> negationFilters = buildNegationFilter(criteria.getField().getName(), criteria.getFilterCriteriaEntries().iterator());
while (chainIterator.hasNext()) {
FilterBuilder fb = null;
Criteria chainedCriteria = chainIterator.next();
if (chainedCriteria.isOr()) {
fb = orFilter(createFilterFragmentForCriteria(chainedCriteria).toArray(new FilterBuilder[]{}));
fbList.add(fb);
} else if (chainedCriteria.isNegating()) {
List<FilterBuilder> negationFilters = buildNegationFilter(criteria.getField().getName(), criteria.getFilterCriteriaEntries().iterator());
if (!negationFilters.isEmpty()) {
fbList.addAll(negationFilters);
}
} else {
fbList.addAll(createFilterFragmentForCriteria(chainedCriteria));
}
}
if (!negationFilters.isEmpty()) {
fbList.addAll(negationFilters);
}
} else {
fbList.addAll(createFilterFragmentForCriteria(chainedCriteria));
}
}
if (!fbList.isEmpty()) {
if (fbList.size() == 1) {
filter = fbList.get(0);
} else {
filter = andFilter(fbList.toArray(new FilterBuilder[]{}));
}
}
if (!fbList.isEmpty()) {
if (fbList.size() == 1) {
filter = fbList.get(0);
} else {
filter = andFilter(fbList.toArray(new FilterBuilder[]{}));
}
}
return filter;
}
return filter;
}
private List<FilterBuilder> createFilterFragmentForCriteria(Criteria chainedCriteria) {
Iterator<Criteria.CriteriaEntry> it = chainedCriteria.getFilterCriteriaEntries().iterator();
List<FilterBuilder> filterList = new LinkedList<FilterBuilder>();
private List<FilterBuilder> createFilterFragmentForCriteria(Criteria chainedCriteria) {
Iterator<Criteria.CriteriaEntry> it = chainedCriteria.getFilterCriteriaEntries().iterator();
List<FilterBuilder> filterList = new LinkedList<FilterBuilder>();
String fieldName = chainedCriteria.getField().getName();
Assert.notNull(fieldName, "Unknown field");
FilterBuilder filter = null;
String fieldName = chainedCriteria.getField().getName();
Assert.notNull(fieldName, "Unknown field");
FilterBuilder filter = null;
while (it.hasNext()) {
Criteria.CriteriaEntry entry = it.next();
filter = processCriteriaEntry(entry.getKey(), entry.getValue(), fieldName);
filterList.add(filter);
}
while (it.hasNext()) {
Criteria.CriteriaEntry entry = it.next();
filter = processCriteriaEntry(entry.getKey(), entry.getValue(), fieldName);
filterList.add(filter);
}
return filterList;
}
return filterList;
}
private FilterBuilder processCriteriaEntry(OperationKey key, Object value, String fieldName) {
if (value == null) {
return null;
}
FilterBuilder filter = null;
private FilterBuilder processCriteriaEntry(OperationKey key, Object value, String fieldName) {
if (value == null) {
return null;
}
FilterBuilder filter = null;
switch (key) {
case WITHIN: {
filter = geoDistanceFilter(fieldName);
switch (key) {
case WITHIN: {
filter = geoDistanceFilter(fieldName);
Assert.isTrue(value instanceof Object[], "Value of a geo distance filter should be an array of two values.");
Object[] valArray = (Object[]) value;
Assert.noNullElements(valArray, "Geo distance filter takes 2 not null elements array as parameter.");
Assert.isTrue(valArray.length == 2, "Geo distance filter takes a 2-elements array as parameter.");
Assert.isTrue(valArray[0] instanceof GeoPoint || valArray[0] instanceof String, "First element of a geo distance filter must be a GeoLocation or String");
Assert.isTrue(valArray[1] instanceof String, "Second element of a geo distance filter must be a String");
Assert.isTrue(value instanceof Object[], "Value of a geo distance filter should be an array of two values.");
Object[] valArray = (Object[]) value;
Assert.noNullElements(valArray, "Geo distance filter takes 2 not null elements array as parameter.");
Assert.isTrue(valArray.length == 2, "Geo distance filter takes a 2-elements array as parameter.");
Assert.isTrue(valArray[0] instanceof GeoPoint || valArray[0] instanceof String, "First element of a geo distance filter must be a GeoLocation or String");
Assert.isTrue(valArray[1] instanceof String, "Second element of a geo distance filter must be a String");
String dist = (String) valArray[1];
if (valArray[0] instanceof GeoPoint) {
GeoPoint loc = (GeoPoint) valArray[0];
((GeoDistanceFilterBuilder) filter).lat(loc.getLat()).lon(loc.getLon()).distance(dist);
} else {
String loc = (String) valArray[0];
if (loc.contains(",")) {
String c[] = loc.split(",");
((GeoDistanceFilterBuilder) filter).lat(Double.parseDouble(c[0])).lon(Double.parseDouble(c[1])).distance(dist);
} else {
((GeoDistanceFilterBuilder) filter).geohash(loc).distance(dist);
}
String dist = (String) valArray[1];
if (valArray[0] instanceof GeoPoint) {
GeoPoint loc = (GeoPoint) valArray[0];
((GeoDistanceFilterBuilder) filter).lat(loc.getLat()).lon(loc.getLon()).distance(dist);
} else {
String loc = (String) valArray[0];
if (loc.contains(",")) {
String c[] = loc.split(",");
((GeoDistanceFilterBuilder) filter).lat(Double.parseDouble(c[0])).lon(Double.parseDouble(c[1])).distance(dist);
} else {
((GeoDistanceFilterBuilder) filter).geohash(loc).distance(dist);
}
}
}
break;
}
break;
}
case BBOX: {
filter = geoBoundingBoxFilter(fieldName);
case BBOX: {
filter = geoBoundingBoxFilter(fieldName);
Assert.isTrue(value instanceof Object[], "Value of a boundedBy filter should be an array of one or two values.");
Object[] valArray = (Object[]) value;
Assert.noNullElements(valArray, "Geo boundedBy filter takes a not null element array as parameter.");
Assert.isTrue(value instanceof Object[], "Value of a boundedBy filter should be an array of one or two values.");
Object[] valArray = (Object[]) value;
Assert.noNullElements(valArray, "Geo boundedBy filter takes a not null element array as parameter.");
if (valArray.length == 1) {
//GeoEnvelop
oneParameterBBox((GeoBoundingBoxFilterBuilder) filter, valArray[0]);
} else if (valArray.length == 2) {
//2x GeoPoint
//2x String
twoParameterBBox((GeoBoundingBoxFilterBuilder) filter, valArray);
} else {
//error
Assert.isTrue(false, "Geo distance filter takes a 1-elements array(GeoBox) or 2-elements array(GeoPoints or Strings(format lat,lon or geohash)).");
}
break;
}
}
if (valArray.length == 1) {
//GeoEnvelop
oneParameterBBox((GeoBoundingBoxFilterBuilder) filter, valArray[0]);
} else if (valArray.length == 2) {
//2x GeoPoint
//2x String
twoParameterBBox((GeoBoundingBoxFilterBuilder) filter, valArray);
} else {
//error
Assert.isTrue(false, "Geo distance filter takes a 1-elements array(GeoBox) or 2-elements array(GeoPoints or Strings(format lat,lon or geohash)).");
}
break;
}
return filter;
}
}
private void oneParameterBBox(GeoBoundingBoxFilterBuilder filter, Object value) {
Assert.isTrue(value instanceof GeoBox, "single-element of boundedBy filter must be type of GeoBox");
GeoBox geoBBox = (GeoBox) value;
filter.topLeft(geoBBox.getTopLeft().getLat(), geoBBox.getTopLeft().getLon());
filter.bottomRight(geoBBox.getBottomRight().getLat(), geoBBox.getBottomRight().getLon());
}
return filter;
}
private static boolean isType(Object[] array, Class clazz) {
for (Object o : array) {
if (!clazz.isInstance(o)) {
return false;
}
}
return true;
}
private void oneParameterBBox(GeoBoundingBoxFilterBuilder filter, Object value) {
Assert.isTrue(value instanceof GeoBox, "single-element of boundedBy filter must be type of GeoBox");
GeoBox geoBBox = (GeoBox) value;
filter.topLeft(geoBBox.getTopLeft().getLat(), geoBBox.getTopLeft().getLon());
filter.bottomRight(geoBBox.getBottomRight().getLat(), geoBBox.getBottomRight().getLon());
}
private void twoParameterBBox(GeoBoundingBoxFilterBuilder filter, Object[] values) {
Assert.isTrue(isType(values, GeoPoint.class) || isType(values, String.class), " both elements of boundedBy filter must be type of GeoPoint or String(format lat,lon or geohash)");
if (values[0] instanceof GeoPoint) {
GeoPoint topLeft = (GeoPoint) values[0];
GeoPoint bottomRight = (GeoPoint) values[1];
filter.topLeft(topLeft.getLat(), topLeft.getLon());
filter.bottomRight(bottomRight.getLat(), bottomRight.getLon());
} else {
String topLeft = (String) values[0];
String bottomRight = (String) values[1];
filter.topLeft(topLeft);
filter.bottomRight(bottomRight);
}
}
private static boolean isType(Object[] array, Class clazz) {
for (Object o : array) {
if (!clazz.isInstance(o)) {
return false;
}
}
return true;
}
private List<FilterBuilder> buildNegationFilter(String fieldName, Iterator<Criteria.CriteriaEntry> it) {
List<FilterBuilder> notFilterList = new LinkedList<FilterBuilder>();
private void twoParameterBBox(GeoBoundingBoxFilterBuilder filter, Object[] values) {
Assert.isTrue(isType(values, GeoPoint.class) || isType(values, String.class), " both elements of boundedBy filter must be type of GeoPoint or String(format lat,lon or geohash)");
if (values[0] instanceof GeoPoint) {
GeoPoint topLeft = (GeoPoint) values[0];
GeoPoint bottomRight = (GeoPoint) values[1];
filter.topLeft(topLeft.getLat(), topLeft.getLon());
filter.bottomRight(bottomRight.getLat(), bottomRight.getLon());
} else {
String topLeft = (String) values[0];
String bottomRight = (String) values[1];
filter.topLeft(topLeft);
filter.bottomRight(bottomRight);
}
}
while (it.hasNext()) {
Criteria.CriteriaEntry criteriaEntry = it.next();
FilterBuilder notFilter = notFilter(processCriteriaEntry(criteriaEntry.getKey(), criteriaEntry.getValue(), fieldName));
notFilterList.add(notFilter);
}
private List<FilterBuilder> buildNegationFilter(String fieldName, Iterator<Criteria.CriteriaEntry> it) {
List<FilterBuilder> notFilterList = new LinkedList<FilterBuilder>();
while (it.hasNext()) {
Criteria.CriteriaEntry criteriaEntry = it.next();
FilterBuilder notFilter = notFilter(processCriteriaEntry(criteriaEntry.getKey(), criteriaEntry.getValue(), fieldName));
notFilterList.add(notFilter);
}
return notFilterList;
}
return notFilterList;
}
}

View File

@ -15,23 +15,23 @@
*/
package org.springframework.data.elasticsearch.core;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.BoostableQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.springframework.data.elasticsearch.core.query.Criteria;
import org.springframework.util.Assert;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.springframework.data.elasticsearch.core.query.Criteria.*;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.springframework.data.elasticsearch.core.query.Criteria.OperationKey;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.BoostableQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.springframework.data.elasticsearch.core.query.Criteria;
import org.springframework.util.Assert;
/**
* CriteriaQueryProcessor
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
* @author Franck Marchand
@ -39,136 +39,133 @@ import static org.springframework.data.elasticsearch.core.query.Criteria.Operati
class CriteriaQueryProcessor {
QueryBuilder createQueryFromCriteria(Criteria criteria) {
if(criteria == null)
return null;
QueryBuilder createQueryFromCriteria(Criteria criteria) {
if (criteria == null)
return null;
List<QueryBuilder> shouldQueryBuilderList = new LinkedList<QueryBuilder>();
List<QueryBuilder> mustNotQueryBuilderList = new LinkedList<QueryBuilder>();
List<QueryBuilder> mustQueryBuilderList = new LinkedList<QueryBuilder>();
List<QueryBuilder> shouldQueryBuilderList = new LinkedList<QueryBuilder>();
List<QueryBuilder> mustNotQueryBuilderList = new LinkedList<QueryBuilder>();
List<QueryBuilder> mustQueryBuilderList = new LinkedList<QueryBuilder>();
ListIterator<Criteria> chainIterator = criteria.getCriteriaChain().listIterator();
while (chainIterator.hasNext()) {
Criteria chainedCriteria = chainIterator.next();
QueryBuilder queryFragmentForCriteria = createQueryFragmentForCriteria(chainedCriteria);
if (queryFragmentForCriteria != null) {
if (chainedCriteria.isOr()) {
shouldQueryBuilderList.add(queryFragmentForCriteria);
} else if (chainedCriteria.isNegating()) {
mustNotQueryBuilderList.add(queryFragmentForCriteria);
} else {
mustQueryBuilderList.add(queryFragmentForCriteria);
}
}
}
BoolQueryBuilder query = null;
if (!shouldQueryBuilderList.isEmpty() || !mustNotQueryBuilderList.isEmpty() || !mustQueryBuilderList.isEmpty()) {
query = boolQuery();
for (QueryBuilder qb : shouldQueryBuilderList) {
query.should(qb);
}
for (QueryBuilder qb : mustNotQueryBuilderList) {
query.mustNot(qb);
}
for (QueryBuilder qb : mustQueryBuilderList) {
query.must(qb);
}
}
return query;
}
ListIterator<Criteria> chainIterator = criteria.getCriteriaChain().listIterator();
while (chainIterator.hasNext()) {
Criteria chainedCriteria = chainIterator.next();
QueryBuilder queryFragmentForCriteria = createQueryFragmentForCriteria(chainedCriteria);
private QueryBuilder createQueryFragmentForCriteria(Criteria chainedCriteria) {
if (chainedCriteria.getQueryCriteriaEntries().isEmpty())
return null;
if(queryFragmentForCriteria!=null) {
if(chainedCriteria.isOr()){
shouldQueryBuilderList.add(queryFragmentForCriteria);
}else if(chainedCriteria.isNegating()){
mustNotQueryBuilderList.add(queryFragmentForCriteria);
}else{
mustQueryBuilderList.add(queryFragmentForCriteria);
}
}
}
Iterator<Criteria.CriteriaEntry> it = chainedCriteria.getQueryCriteriaEntries().iterator();
boolean singeEntryCriteria = (chainedCriteria.getQueryCriteriaEntries().size() == 1);
BoolQueryBuilder query = null;
String fieldName = chainedCriteria.getField().getName();
Assert.notNull(fieldName, "Unknown field");
QueryBuilder query = null;
if(!shouldQueryBuilderList.isEmpty() || !mustNotQueryBuilderList.isEmpty() || !mustQueryBuilderList.isEmpty()) {
if (singeEntryCriteria) {
Criteria.CriteriaEntry entry = it.next();
query = processCriteriaEntry(entry.getKey(), entry.getValue(), fieldName);
} else {
query = boolQuery();
while (it.hasNext()) {
Criteria.CriteriaEntry entry = it.next();
((BoolQueryBuilder) query).must(processCriteriaEntry(entry.getKey(), entry.getValue(), fieldName));
}
}
query = boolQuery();
for(QueryBuilder qb : shouldQueryBuilderList) {
query.should(qb);
}
for(QueryBuilder qb : mustNotQueryBuilderList) {
query.mustNot(qb);
}
for(QueryBuilder qb : mustQueryBuilderList) {
query.must(qb);
}
}
return query;
}
addBoost(query, chainedCriteria.getBoost());
return query;
}
private QueryBuilder createQueryFragmentForCriteria(Criteria chainedCriteria) {
if(chainedCriteria.getQueryCriteriaEntries().isEmpty())
return null;
private QueryBuilder processCriteriaEntry(OperationKey key, Object value, String fieldName) {
if (value == null) {
return null;
}
QueryBuilder query = null;
Iterator<Criteria.CriteriaEntry> it = chainedCriteria.getQueryCriteriaEntries().iterator();
boolean singeEntryCriteria = (chainedCriteria.getQueryCriteriaEntries().size() == 1);
switch (key) {
case EQUALS:
query = fieldQuery(fieldName, value);
break;
case CONTAINS:
query = fieldQuery(fieldName, "*" + value + "*").analyzeWildcard(true);
break;
case STARTS_WITH:
query = fieldQuery(fieldName, value + "*").analyzeWildcard(true);
break;
case ENDS_WITH:
query = fieldQuery(fieldName, "*" + value).analyzeWildcard(true);
break;
case EXPRESSION:
query = queryString((String) value).field(fieldName);
break;
case BETWEEN:
Object[] ranges = (Object[]) value;
query = rangeQuery(fieldName).from(ranges[0]).to(ranges[1]);
break;
case FUZZY:
query = fuzzyQuery(fieldName, (String) value);
break;
case IN:
query = boolQuery();
Iterable<Object> collection = (Iterable<Object>) value;
for (Object item : collection) {
((BoolQueryBuilder) query).should(fieldQuery(fieldName, item));
}
break;
}
String fieldName = chainedCriteria.getField().getName();
Assert.notNull(fieldName,"Unknown field");
QueryBuilder query = null;
return query;
}
if(singeEntryCriteria){
Criteria.CriteriaEntry entry = it.next();
query = processCriteriaEntry(entry.getKey(), entry.getValue(), fieldName);
}else{
query = boolQuery();
while (it.hasNext()){
Criteria.CriteriaEntry entry = it.next();
((BoolQueryBuilder)query).must(processCriteriaEntry(entry.getKey(), entry.getValue(), fieldName));
}
}
addBoost(query, chainedCriteria.getBoost());
return query;
}
private QueryBuilder processCriteriaEntry(OperationKey key, Object value, String fieldName) {
if (value == null) {
return null;
}
QueryBuilder query = null;
switch (key) {
case EQUALS:
query = fieldQuery(fieldName, value);
break;
case CONTAINS:
query = fieldQuery(fieldName, "*" + value + "*").analyzeWildcard(true);
break;
case STARTS_WITH:
query = fieldQuery(fieldName, value + "*").analyzeWildcard(true);
break;
case ENDS_WITH:
query = fieldQuery(fieldName, "*" + value).analyzeWildcard(true);
break;
case EXPRESSION:
query = queryString((String) value).field(fieldName);
break;
case BETWEEN:
Object[] ranges = (Object[]) value;
query = rangeQuery(fieldName).from(ranges[0]).to(ranges[1]);
break;
case FUZZY:
query = fuzzyQuery(fieldName, (String) value);
break;
case IN:
query = boolQuery();
Iterable<Object> collection = (Iterable<Object>) value;
for (Object item : collection) {
((BoolQueryBuilder) query).should(fieldQuery(fieldName, item));
}
break;
}
return query;
}
private QueryBuilder buildNegationQuery(String fieldName, Iterator<Criteria.CriteriaEntry> it) {
BoolQueryBuilder notQuery = boolQuery();
while (it.hasNext()) {
notQuery.mustNot(fieldQuery(fieldName, it.next().getValue()));
}
return notQuery;
}
private void addBoost(QueryBuilder query, float boost) {
if (Float.isNaN(boost)) {
return;
}
if (query instanceof BoostableQueryBuilder) {
((BoostableQueryBuilder) query).boost(boost);
}
}
private QueryBuilder buildNegationQuery(String fieldName, Iterator<Criteria.CriteriaEntry> it) {
BoolQueryBuilder notQuery = boolQuery();
while (it.hasNext()) {
notQuery.mustNot(fieldQuery(fieldName, it.next().getValue()));
}
return notQuery;
}
private void addBoost(QueryBuilder query, float boost) {
if (Float.isNaN(boost)) {
return;
}
if (query instanceof BoostableQueryBuilder) {
((BoostableQueryBuilder) query).boost(boost);
}
}
}

View File

@ -15,11 +15,11 @@
*/
package org.springframework.data.elasticsearch.core;
import java.io.IOException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
/**
* DocumentMapper using jackson
*
@ -28,20 +28,20 @@ import java.io.IOException;
*/
public class DefaultEntityMapper implements EntityMapper {
private ObjectMapper objectMapper;
private ObjectMapper objectMapper;
public DefaultEntityMapper() {
objectMapper = new ObjectMapper();
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
}
public DefaultEntityMapper() {
objectMapper = new ObjectMapper();
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
}
@Override
public String mapToString(Object object) throws IOException {
return objectMapper.writeValueAsString(object);
}
@Override
public String mapToString(Object object) throws IOException {
return objectMapper.writeValueAsString(object);
}
@Override
public <T> T mapToObject(String source, Class<T> clazz) throws IOException {
return objectMapper.readValue(source, clazz);
}
@Override
public <T> T mapToObject(String source, Class<T> clazz) throws IOException {
return objectMapper.readValue(source, clazz);
}
}

View File

@ -16,6 +16,14 @@
package org.springframework.data.elasticsearch.core;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.Method;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.base.Strings;
@ -34,115 +42,107 @@ import org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersiste
import org.springframework.data.mapping.PersistentProperty;
import org.springframework.data.mapping.context.MappingContext;
import java.lang.reflect.Method;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
* @author Artur Konczak
*/
public class DefaultResultMapper extends AbstractResultMapper {
private MappingContext<? extends ElasticsearchPersistentEntity<?>, ElasticsearchPersistentProperty> mappingContext;
public DefaultResultMapper(){
super(new DefaultEntityMapper());
}
public DefaultResultMapper(MappingContext<? extends ElasticsearchPersistentEntity<?>, ElasticsearchPersistentProperty> mappingContext){
super(new DefaultEntityMapper());
this.mappingContext = mappingContext;
}
private MappingContext<? extends ElasticsearchPersistentEntity<?>, ElasticsearchPersistentProperty> mappingContext;
public DefaultResultMapper(EntityMapper entityMapper) {
super(entityMapper);
}
public DefaultResultMapper() {
super(new DefaultEntityMapper());
}
@Override
public <T> FacetedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable) {
long totalHits = response.getHits().totalHits();
List<T> results = new ArrayList<T>();
for (SearchHit hit : response.getHits()) {
if (hit != null) {
T result = null;
if (!Strings.isNullOrEmpty(hit.sourceAsString())) {
result = mapEntity(hit.sourceAsString(), clazz);
} else {
result = mapEntity(hit.getFields().values(), clazz);
}
setPersistentEntityId(result, hit.getId(), clazz);
results.add(result);
}
}
List<FacetResult> facets = new ArrayList<FacetResult>();
if (response.getFacets() != null) {
for (Facet facet : response.getFacets()) {
FacetResult facetResult = DefaultFacetMapper.parse(facet);
if (facetResult != null) {
facets.add(facetResult);
}
}
}
public DefaultResultMapper(MappingContext<? extends ElasticsearchPersistentEntity<?>, ElasticsearchPersistentProperty> mappingContext) {
super(new DefaultEntityMapper());
this.mappingContext = mappingContext;
}
return new FacetedPageImpl<T>(results, pageable, totalHits, facets);
}
public DefaultResultMapper(EntityMapper entityMapper) {
super(entityMapper);
}
private <T> T mapEntity(Collection<SearchHitField> values, Class<T> clazz) {
return mapEntity(buildJSONFromFields(values), clazz);
}
@Override
public <T> FacetedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable) {
long totalHits = response.getHits().totalHits();
List<T> results = new ArrayList<T>();
for (SearchHit hit : response.getHits()) {
if (hit != null) {
T result = null;
if (!Strings.isNullOrEmpty(hit.sourceAsString())) {
result = mapEntity(hit.sourceAsString(), clazz);
} else {
result = mapEntity(hit.getFields().values(), clazz);
}
setPersistentEntityId(result, hit.getId(), clazz);
results.add(result);
}
}
List<FacetResult> facets = new ArrayList<FacetResult>();
if (response.getFacets() != null) {
for (Facet facet : response.getFacets()) {
FacetResult facetResult = DefaultFacetMapper.parse(facet);
if (facetResult != null) {
facets.add(facetResult);
}
}
}
private String buildJSONFromFields(Collection<SearchHitField> values) {
JsonFactory nodeFactory = new JsonFactory();
try {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
JsonGenerator generator = nodeFactory.createGenerator(stream, JsonEncoding.UTF8);
generator.writeStartObject();
for (SearchHitField value : values) {
if (value.getValues().size() > 1) {
generator.writeArrayFieldStart(value.getName());
for (Object val : value.getValues()) {
generator.writeObject(val);
}
generator.writeEndArray();
} else {
generator.writeObjectField(value.getName(), value.getValue());
}
}
generator.writeEndObject();
generator.flush();
return new String(stream.toByteArray(), Charset.forName("UTF-8"));
} catch (IOException e) {
return null;
}
}
return new FacetedPageImpl<T>(results, pageable, totalHits, facets);
}
@Override
public <T> T mapResult(GetResponse response, Class<T> clazz) {
T result = mapEntity(response.getSourceAsString(),clazz);
if (result != null){
setPersistentEntityId(result, response.getId(), clazz);
}
return result;
}
private <T> void setPersistentEntityId(T result, String id, Class<T> clazz) {
if (mappingContext != null && clazz.isAnnotationPresent(Document.class)){
PersistentProperty<ElasticsearchPersistentProperty> idProperty = mappingContext.getPersistentEntity(clazz).getIdProperty();
// Only deal with String because ES generated Ids are strings !
if (idProperty != null && idProperty.getType().isAssignableFrom(String.class)){
Method setter = idProperty.getSetter();
if (setter != null){
try{
setter.invoke(result, id);
} catch (Throwable t) {
t.printStackTrace();
}
}
}
}
}
private <T> T mapEntity(Collection<SearchHitField> values, Class<T> clazz) {
return mapEntity(buildJSONFromFields(values), clazz);
}
private String buildJSONFromFields(Collection<SearchHitField> values) {
JsonFactory nodeFactory = new JsonFactory();
try {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
JsonGenerator generator = nodeFactory.createGenerator(stream, JsonEncoding.UTF8);
generator.writeStartObject();
for (SearchHitField value : values) {
if (value.getValues().size() > 1) {
generator.writeArrayFieldStart(value.getName());
for (Object val : value.getValues()) {
generator.writeObject(val);
}
generator.writeEndArray();
} else {
generator.writeObjectField(value.getName(), value.getValue());
}
}
generator.writeEndObject();
generator.flush();
return new String(stream.toByteArray(), Charset.forName("UTF-8"));
} catch (IOException e) {
return null;
}
}
@Override
public <T> T mapResult(GetResponse response, Class<T> clazz) {
T result = mapEntity(response.getSourceAsString(), clazz);
if (result != null) {
setPersistentEntityId(result, response.getId(), clazz);
}
return result;
}
private <T> void setPersistentEntityId(T result, String id, Class<T> clazz) {
if (mappingContext != null && clazz.isAnnotationPresent(Document.class)) {
PersistentProperty<ElasticsearchPersistentProperty> idProperty = mappingContext.getPersistentEntity(clazz).getIdProperty();
// Only deal with String because ES generated Ids are strings !
if (idProperty != null && idProperty.getType().isAssignableFrom(String.class)) {
Method setter = idProperty.getSetter();
if (setter != null) {
try {
setter.invoke(result, id);
} catch (Throwable t) {
t.printStackTrace();
}
}
}
}
}
}

View File

@ -15,17 +15,17 @@
*/
package org.springframework.data.elasticsearch.core;
import java.util.List;
import java.util.Set;
import org.elasticsearch.action.update.UpdateResponse;
import org.springframework.data.domain.Page;
import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter;
import org.springframework.data.elasticsearch.core.query.*;
import java.util.List;
import java.util.Set;
/**
* ElasticsearchOperations
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -38,7 +38,7 @@ public interface ElasticsearchOperations {
/**
* Create an index for a class
*
*
* @param clazz
* @param <T>
*/
@ -46,7 +46,7 @@ public interface ElasticsearchOperations {
/**
* Create mapping for a class
*
*
* @param clazz
* @param <T>
*/
@ -54,26 +54,26 @@ public interface ElasticsearchOperations {
/**
* Execute the query against elasticsearch and return the first returned object
*
*
* @param query
* @param clazz
* @return the first matching object
*/
<T> T queryForObject(GetQuery query, Class<T> clazz);
/**
* Execute the query against elasticsearch and return the first returned object using custom mapper
*
* @param query
* @param clazz
* @param mapper
* @return the first matching object
*/
<T> T queryForObject(GetQuery query, Class<T> clazz, GetResultMapper mapper);
/**
* Execute the query against elasticsearch and return the first returned object using custom mapper
*
* @param query
* @param clazz
* @param mapper
* @return the first matching object
*/
<T> T queryForObject(GetQuery query, Class<T> clazz, GetResultMapper mapper);
/**
* Execute the query against elasticsearch and return the first returned object
*
*
* @param query
* @param clazz
* @return the first matching object
@ -82,7 +82,7 @@ public interface ElasticsearchOperations {
/**
* Execute the query against elasticsearch and return the first returned object
*
*
* @param query
* @param clazz
* @return the first matching object
@ -91,25 +91,25 @@ public interface ElasticsearchOperations {
/**
* Execute the query against elasticsearch and return result as {@link Page}
*
*
* @param query
* @param clazz
* @return
*/
<T> FacetedPage<T> queryForPage(SearchQuery query, Class<T> clazz);
/**
* Execute the query against elasticsearch and return result as {@link Page} using custom mapper
*
* @param query
* @param clazz
* @return
*/
<T> FacetedPage<T> queryForPage(SearchQuery query, Class<T> clazz, SearchResultMapper mapper);
/**
* Execute the query against elasticsearch and return result as {@link Page} using custom mapper
*
* @param query
* @param clazz
* @return
*/
<T> FacetedPage<T> queryForPage(SearchQuery query, Class<T> clazz, SearchResultMapper mapper);
/**
* Execute the query against elasticsearch and return result as {@link Page}
*
*
* @param query
* @param clazz
* @return
@ -118,25 +118,25 @@ public interface ElasticsearchOperations {
/**
* Execute the query against elasticsearch and return result as {@link Page}
*
*
* @param query
* @param clazz
* @return
*/
<T> FacetedPage<T> queryForPage(StringQuery query, Class<T> clazz);
/**
* Execute the query against elasticsearch and return result as {@link Page} using custom mapper
*
* @param query
* @param clazz
* @return
*/
<T> FacetedPage<T> queryForPage(StringQuery query, Class<T> clazz, SearchResultMapper mapper);
/**
* Execute the query against elasticsearch and return result as {@link Page} using custom mapper
*
* @param query
* @param clazz
* @return
*/
<T> FacetedPage<T> queryForPage(StringQuery query, Class<T> clazz, SearchResultMapper mapper);
/**
* Execute the criteria query against elasticsearch and return result as {@link List}
*
*
* @param query
* @param clazz
* @param <T>
@ -146,7 +146,7 @@ public interface ElasticsearchOperations {
/**
* Execute the string query against elasticsearch and return result as {@link List}
*
*
* @param query
* @param clazz
* @param <T>
@ -154,19 +154,19 @@ public interface ElasticsearchOperations {
*/
<T> List<T> queryForList(StringQuery query, Class<T> clazz);
/**
* Execute the search query against elasticsearch and return result as {@link List}
*
* @param query
* @param clazz
* @param <T>
* @return
*/
<T> List<T> queryForList(SearchQuery query, Class<T> clazz);
/**
* Execute the search query against elasticsearch and return result as {@link List}
*
* @param query
* @param clazz
* @param <T>
* @return
*/
<T> List<T> queryForList(SearchQuery query, Class<T> clazz);
/**
* Execute the query against elasticsearch and return ids
*
*
* @param query
* @return
*/
@ -174,7 +174,7 @@ public interface ElasticsearchOperations {
/**
* return number of elements found for given query
*
*
* @param query
* @param clazz
* @return
@ -183,30 +183,30 @@ public interface ElasticsearchOperations {
/**
* Index an object. Will do save or update
*
*
* @param query
* @return returns the document id
*/
String index(IndexQuery query);
/**
* Partial update of the document
*
* @param updateQuery
* @return
*/
UpdateResponse update(UpdateQuery updateQuery);
/**
* Partial update of the document
*
* @param updateQuery
* @return
*/
UpdateResponse update(UpdateQuery updateQuery);
/**
* Bulk index all objects. Will do save or update
*
*
* @param queries
*/
void bulkIndex(List<IndexQuery> queries);
/**
* Delete the one object with provided id
*
*
* @param indexName
* @param type
* @param id
@ -216,7 +216,7 @@ public interface ElasticsearchOperations {
/**
* Delete the one object with provided id
*
*
* @param clazz
* @param id
* @return documentId of the document deleted
@ -225,18 +225,18 @@ public interface ElasticsearchOperations {
/**
* Delete all records matching the query
*
*
* @param clazz
* @param query
*/
<T> void delete(DeleteQuery query, Class<T> clazz);
/**
* Delete all records matching the query
*
* @param query
*/
void delete(DeleteQuery query);
* Delete all records matching the query
*
* @param query
*/
void delete(DeleteQuery query);
/**
* Deletes an index for given entity
@ -248,34 +248,34 @@ public interface ElasticsearchOperations {
<T> boolean deleteIndex(Class<T> clazz);
/**
* Deletes a type in an index
*
* @param index
* @param type
*/
void deleteType(String index, String type);
* Deletes a type in an index
*
* @param index
* @param type
*/
void deleteType(String index, String type);
/**
* check if index exists
*
*
* @param clazz
* @param <T>
* @return
*/
<T> boolean indexExists(Class<T> clazz);
/**
* check if type exists in an index
*
* @param index
* @param type
* @return
*/
boolean typeExists(String index, String type);
/**
* check if type exists in an index
*
* @param index
* @param type
* @return
*/
boolean typeExists(String index, String type);
/**
* refresh the index
*
*
* @param indexName
* @param waitForOperation
*/
@ -283,7 +283,7 @@ public interface ElasticsearchOperations {
/**
* refresh the index
*
*
* @param clazz
* @param waitForOperation
*/
@ -291,7 +291,7 @@ public interface ElasticsearchOperations {
/**
* Returns scroll id for scan query
*
*
* @param query
* @param scrollTimeInMillis
* @param noFields
@ -301,7 +301,7 @@ public interface ElasticsearchOperations {
/**
* Scrolls the results for given scroll id
*
*
* @param scrollId
* @param scrollTimeInMillis
* @param clazz
@ -310,20 +310,20 @@ public interface ElasticsearchOperations {
*/
<T> Page<T> scroll(String scrollId, long scrollTimeInMillis, Class<T> clazz);
/**
* Scrolls the results for given scroll id using custom result mapper
*
* @param scrollId
* @param scrollTimeInMillis
* @param mapper
* @param <T>
* @return
*/
<T> Page<T> scroll(String scrollId, long scrollTimeInMillis, SearchResultMapper mapper);
/**
* Scrolls the results for given scroll id using custom result mapper
*
* @param scrollId
* @param scrollTimeInMillis
* @param mapper
* @param <T>
* @return
*/
<T> Page<T> scroll(String scrollId, long scrollTimeInMillis, SearchResultMapper mapper);
/**
* more like this query to search for documents that are "like" a specific document.
*
*
* @param query
* @param clazz
* @param <T>
@ -331,27 +331,27 @@ public interface ElasticsearchOperations {
*/
<T> Page<T> moreLikeThis(MoreLikeThisQuery query, Class<T> clazz);
/**
* adding new alias
*
* @param query
* @return
*/
Boolean addAlias(AliasQuery query);
/**
* adding new alias
*
* @param query
* @return
*/
Boolean addAlias(AliasQuery query);
/**
* removing previously created alias
*
* @param query
* @return
*/
Boolean removeAlias(AliasQuery query);
/**
* removing previously created alias
*
* @param query
* @return
*/
Boolean removeAlias(AliasQuery query);
/**
* get all the alias pointing to specified index
*
* @param indexName
* @return
*/
Set<String> queryForAlias(String indexName);
/**
* get all the alias pointing to specified index
*
* @param indexName
* @return
*/
Set<String> queryForAlias(String indexName);
}
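For orientation, here is a minimal usage sketch of the operations declared above, in the style of the README examples. SampleEntity, its setters and the injected bean are invented for illustration; index(IndexQuery) and queryForPage(SearchQuery, Class) are the methods from the interface itself, and NativeSearchQueryBuilder is assumed as the usual way to build a SearchQuery.

```java
@Autowired
private ElasticsearchOperations elasticsearchOperations;

// index (save or update) a single document, then query it back as a page
SampleEntity sampleEntity = new SampleEntity();
sampleEntity.setId("123456");
sampleEntity.setMessage("some message");

IndexQuery indexQuery = new IndexQuery();
indexQuery.setId(sampleEntity.getId());
indexQuery.setObject(sampleEntity);
elasticsearchOperations.index(indexQuery);

SearchQuery searchQuery = new NativeSearchQueryBuilder()
        .withQuery(QueryBuilders.termQuery("message", "message"))
        .build();
FacetedPage<SampleEntity> page = elasticsearchOperations.queryForPage(searchQuery, SampleEntity.class);
```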

View File

@ -26,7 +26,7 @@ import java.io.IOException;
*/
public interface EntityMapper {
public String mapToString(Object object) throws IOException;
public String mapToString(Object object) throws IOException;
public <T> T mapToObject(String source, Class<T> clazz) throws IOException;
public <T> T mapToObject(String source, Class<T> clazz) throws IOException;
}
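As a sketch only, the two-method contract above can be backed by a plain Jackson ObjectMapper; the class name below is hypothetical and not part of this commit.

```java
import java.io.IOException;

import com.fasterxml.jackson.databind.ObjectMapper;

public class JacksonEntityMapper implements EntityMapper {

    private final ObjectMapper objectMapper = new ObjectMapper();

    @Override
    public String mapToString(Object object) throws IOException {
        // entity -> JSON document stored in Elasticsearch
        return objectMapper.writeValueAsString(object);
    }

    @Override
    public <T> T mapToObject(String source, Class<T> clazz) throws IOException {
        // _source JSON -> entity instance
        return objectMapper.readValue(source, clazz);
    }
}
```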

View File

@ -15,13 +15,12 @@
*/
package org.springframework.data.elasticsearch.core;
import java.util.List;
import org.springframework.data.domain.Page;
import org.springframework.data.elasticsearch.core.facet.FacetResult;
import java.util.List;
/**
*
* @author Rizwan Idrees
* @author Mohsin Husen
* @author Artur Konczak
@ -29,10 +28,9 @@ import java.util.List;
*/
public interface FacetedPage<T> extends Page<T> {
boolean hasFacets();
boolean hasFacets();
List<FacetResult> getFacets();
FacetResult getFacet(String name);
List<FacetResult> getFacets();
FacetResult getFacet(String name);
}
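A hedged example of consuming a FacetedPage returned by queryForPage. The entity type and the injected elasticsearchTemplate are assumptions; hasFacets(), getFacets() and the FacetResult accessors are the methods declared in this commit.

```java
SearchQuery searchQuery = new NativeSearchQueryBuilder()
        .withQuery(QueryBuilders.matchAllQuery())
        .build();

FacetedPage<SampleEntity> page = elasticsearchTemplate.queryForPage(searchQuery, SampleEntity.class);
if (page.hasFacets()) {
    for (FacetResult facetResult : page.getFacets()) {
        // each facet carries its name and type; the concrete subclasses expose the entries
        System.out.println(facetResult.getName() + " : " + facetResult.getType());
    }
}
```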

View File

@ -15,15 +15,15 @@
*/
package org.springframework.data.elasticsearch.core;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.elasticsearch.core.facet.FacetResult;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Container for query result and facet results
*
@ -34,37 +34,37 @@ import java.util.Map;
*/
public class FacetedPageImpl<T> extends PageImpl<T> implements FacetedPage<T> {
private List<FacetResult> facets;
private Map<String, FacetResult> mapOfFacets = new HashMap<String, FacetResult>();
private List<FacetResult> facets;
private Map<String, FacetResult> mapOfFacets = new HashMap<String, FacetResult>();
public FacetedPageImpl(List<T> content) {
super(content);
}
public FacetedPageImpl(List<T> content) {
super(content);
}
public FacetedPageImpl(List<T> content, Pageable pageable, long total) {
super(content, pageable, total);
}
public FacetedPageImpl(List<T> content, Pageable pageable, long total) {
super(content, pageable, total);
}
public FacetedPageImpl(List<T> content, Pageable pageable, long total, List<FacetResult> facets) {
super(content, pageable, total);
this.facets = facets;
for (FacetResult facet : facets) {
mapOfFacets.put(facet.getName(), facet);
}
}
public FacetedPageImpl(List<T> content, Pageable pageable, long total, List<FacetResult> facets) {
super(content, pageable, total);
this.facets = facets;
for (FacetResult facet : facets) {
mapOfFacets.put(facet.getName(), facet);
}
}
@Override
public boolean hasFacets() {
return CollectionUtils.isNotEmpty(facets);
}
@Override
public boolean hasFacets() {
return CollectionUtils.isNotEmpty(facets);
}
@Override
public List<FacetResult> getFacets() {
return facets;
}
@Override
public List<FacetResult> getFacets() {
return facets;
}
@Override
public FacetResult getFacet(String name) {
return mapOfFacets.get(name);
}
@Override
public FacetResult getFacet(String name) {
return mapOfFacets.get(name);
}
}

View File

@ -23,6 +23,5 @@ import org.elasticsearch.action.get.GetResponse;
*/
public interface GetResultMapper {
<T> T mapResult(GetResponse response, Class<T> clazz);
<T> T mapResult(GetResponse response, Class<T> clazz);
}
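A minimal Jackson-based sketch of the contract above; the class name, the null handling and the exception choice are assumptions made for illustration.

```java
import java.io.IOException;

import org.elasticsearch.action.get.GetResponse;

import com.fasterxml.jackson.databind.ObjectMapper;

public class SimpleGetResultMapper implements GetResultMapper {

    private final ObjectMapper objectMapper = new ObjectMapper();

    @Override
    public <T> T mapResult(GetResponse response, Class<T> clazz) {
        if (!response.isExists() || response.getSourceAsString() == null) {
            return null; // nothing stored under the requested id
        }
        try {
            return objectMapper.readValue(response.getSourceAsString(), clazz);
        } catch (IOException e) {
            throw new IllegalStateException("Could not map GetResponse to " + clazz, e);
        }
    }
}
```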

View File

@ -15,6 +15,14 @@
*/
package org.springframework.data.elasticsearch.core;
import static org.apache.commons.lang.StringUtils.*;
import static org.elasticsearch.common.xcontent.XContentFactory.*;
import static org.springframework.util.StringUtils.*;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.springframework.core.GenericCollectionTypeResolver;
import org.springframework.data.annotation.Transient;
@ -25,15 +33,6 @@ import org.springframework.data.mapping.model.SimpleTypeHolder;
import org.springframework.data.util.ClassTypeInformation;
import org.springframework.data.util.TypeInformation;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import static org.apache.commons.lang.StringUtils.EMPTY;
import static org.apache.commons.lang.StringUtils.isNotBlank;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.springframework.util.StringUtils.hasText;
/**
* @author Rizwan Idrees
* @author Mohsin Husen
@ -42,269 +41,267 @@ import static org.springframework.util.StringUtils.hasText;
class MappingBuilder {
public static final String FIELD_STORE = "store";
public static final String FIELD_TYPE = "type";
public static final String FIELD_INDEX = "index";
public static final String FIELD_FORMAT = "format";
public static final String FIELD_SEARCH_ANALYZER = "search_analyzer";
public static final String FIELD_INDEX_ANALYZER = "index_analyzer";
public static final String FIELD_PROPERTIES = "properties";
public static final String FIELD_PARENT = "_parent";
public static final String FIELD_STORE = "store";
public static final String FIELD_TYPE = "type";
public static final String FIELD_INDEX = "index";
public static final String FIELD_FORMAT = "format";
public static final String FIELD_SEARCH_ANALYZER = "search_analyzer";
public static final String FIELD_INDEX_ANALYZER = "index_analyzer";
public static final String FIELD_PROPERTIES = "properties";
public static final String FIELD_PARENT = "_parent";
public static final String INDEX_VALUE_NOT_ANALYZED = "not_analyzed";
public static final String TYPE_VALUE_STRING = "string";
public static final String TYPE_VALUE_GEO_POINT = "geo_point";
public static final String INDEX_VALUE_NOT_ANALYZED = "not_analyzed";
public static final String TYPE_VALUE_STRING = "string";
public static final String TYPE_VALUE_GEO_POINT = "geo_point";
private static SimpleTypeHolder SIMPLE_TYPE_HOLDER = new SimpleTypeHolder();
private static SimpleTypeHolder SIMPLE_TYPE_HOLDER = new SimpleTypeHolder();
static XContentBuilder buildMapping(Class clazz, String indexType, String idFieldName, String parentType) throws IOException {
static XContentBuilder buildMapping(Class clazz, String indexType, String idFieldName, String parentType) throws IOException {
XContentBuilder mapping = jsonBuilder().startObject().startObject(indexType);
// Parent
if (hasText(parentType)) {
mapping.startObject(FIELD_PARENT).field(FIELD_TYPE,parentType).endObject();
}
XContentBuilder mapping = jsonBuilder().startObject().startObject(indexType);
// Parent
if (hasText(parentType)) {
mapping.startObject(FIELD_PARENT).field(FIELD_TYPE, parentType).endObject();
}
// Properties
XContentBuilder xContentBuilder = mapping.startObject(FIELD_PROPERTIES);
// Properties
XContentBuilder xContentBuilder = mapping.startObject(FIELD_PROPERTIES);
mapEntity(xContentBuilder, clazz, true, idFieldName, EMPTY, false);
mapEntity(xContentBuilder, clazz, true, idFieldName, EMPTY, false);
return xContentBuilder.endObject().endObject().endObject();
}
return xContentBuilder.endObject().endObject().endObject();
}
private static void mapEntity(XContentBuilder xContentBuilder, Class clazz, boolean isRootObject, String idFieldName,
String nestedObjectFieldName, boolean nestedAnnotaion) throws IOException {
private static void mapEntity(XContentBuilder xContentBuilder, Class clazz, boolean isRootObject, String idFieldName,
String nestedObjectFieldName, boolean nestedAnnotaion) throws IOException {
java.lang.reflect.Field[] fields = clazz.getDeclaredFields();
java.lang.reflect.Field[] fields = clazz.getDeclaredFields();
if (!isRootObject && (isAnyPropertyAnnotatedAsField(fields) || nestedAnnotaion)) {
String type = FieldType.Object.toString().toLowerCase();
if(nestedAnnotaion){
type = FieldType.Nested.toString().toLowerCase();
}
xContentBuilder.startObject(nestedObjectFieldName).field(FIELD_TYPE, type).startObject(FIELD_PROPERTIES);
}
if (!isRootObject && (isAnyPropertyAnnotatedAsField(fields) || nestedAnnotaion)) {
String type = FieldType.Object.toString().toLowerCase();
if (nestedAnnotaion) {
type = FieldType.Nested.toString().toLowerCase();
}
xContentBuilder.startObject(nestedObjectFieldName).field(FIELD_TYPE, type).startObject(FIELD_PROPERTIES);
}
for (java.lang.reflect.Field field : fields) {
for (java.lang.reflect.Field field : fields) {
if (field.isAnnotationPresent(Transient.class) || isInIgnoreFields(field)) {
continue;
}
boolean isGeoField = isGeoField(field);
if (field.isAnnotationPresent(Transient.class) || isInIgnoreFields(field)) {
continue;
}
Field singleField = field.getAnnotation(Field.class);
if (!isGeoField && isEntity(field) && !isAnnotated(field)) {
if(singleField == null){
continue;
}
boolean nestedField = isNestedField(field);
mapEntity(xContentBuilder, getFieldType(field), false, EMPTY, field.getName(), nestedField);
if(nestedField){
continue;
}
}
boolean isGeoField = isGeoField(field);
MultiField multiField = field.getAnnotation(MultiField.class);
Field singleField = field.getAnnotation(Field.class);
if (!isGeoField && isEntity(field) && !isAnnotated(field)) {
if (singleField == null) {
continue;
}
boolean nestedField = isNestedField(field);
mapEntity(xContentBuilder, getFieldType(field), false, EMPTY, field.getName(), nestedField);
if (nestedField) {
continue;
}
}
if (isGeoField) {
applyGeoPointFieldMapping(xContentBuilder, field);
}
MultiField multiField = field.getAnnotation(MultiField.class);
if (isRootObject && singleField != null && isIdField(field, idFieldName)) {
applyDefaultIdFieldMapping(xContentBuilder, field);
} else if (multiField != null) {
addMultiFieldMapping(xContentBuilder, field, multiField);
} else if (singleField != null) {
addSingleFieldMapping(xContentBuilder, field, singleField);
}
}
if (isGeoField) {
applyGeoPointFieldMapping(xContentBuilder, field);
}
if (!isRootObject && isAnyPropertyAnnotatedAsField(fields)) {
xContentBuilder.endObject().endObject();
}
if (isRootObject && singleField != null && isIdField(field, idFieldName)) {
applyDefaultIdFieldMapping(xContentBuilder, field);
} else if (multiField != null) {
addMultiFieldMapping(xContentBuilder, field, multiField);
} else if (singleField != null) {
addSingleFieldMapping(xContentBuilder, field, singleField);
}
}
}
if (!isRootObject && isAnyPropertyAnnotatedAsField(fields)) {
xContentBuilder.endObject().endObject();
}
}
private static boolean isAnnotated(java.lang.reflect.Field field) {
return field.getAnnotation(Field.class)==null && field.getAnnotation(MultiField.class)==null && field.getAnnotation(GeoPointField.class)==null;
}
private static boolean isAnnotated(java.lang.reflect.Field field) {
return field.getAnnotation(Field.class) == null && field.getAnnotation(MultiField.class) == null && field.getAnnotation(GeoPointField.class) == null;
}
private static void applyGeoPointFieldMapping(XContentBuilder xContentBuilder, java.lang.reflect.Field field) throws IOException {
xContentBuilder.startObject(field.getName());
xContentBuilder.field(FIELD_TYPE, TYPE_VALUE_GEO_POINT)
.endObject();
}
private static void applyGeoPointFieldMapping(XContentBuilder xContentBuilder, java.lang.reflect.Field field) throws IOException {
xContentBuilder.startObject(field.getName());
xContentBuilder.field(FIELD_TYPE, TYPE_VALUE_GEO_POINT)
.endObject();
}
private static void applyDefaultIdFieldMapping(XContentBuilder xContentBuilder, java.lang.reflect.Field field)
throws IOException {
xContentBuilder.startObject(field.getName())
.field(FIELD_TYPE, TYPE_VALUE_STRING)
.field(FIELD_INDEX, INDEX_VALUE_NOT_ANALYZED);
xContentBuilder.endObject();
}
private static void applyDefaultIdFieldMapping(XContentBuilder xContentBuilder, java.lang.reflect.Field field)
throws IOException {
xContentBuilder.startObject(field.getName())
.field(FIELD_TYPE, TYPE_VALUE_STRING)
.field(FIELD_INDEX, INDEX_VALUE_NOT_ANALYZED);
xContentBuilder.endObject();
}
/**
* Apply mapping for a single @Field annotation
*
* @param xContentBuilder
* @param field
* @param fieldAnnotation
* @throws IOException
*/
private static void addSingleFieldMapping(XContentBuilder xContentBuilder, java.lang.reflect.Field field,
Field fieldAnnotation) throws IOException {
xContentBuilder.startObject(field.getName());
xContentBuilder.field(FIELD_STORE, fieldAnnotation.store());
if (FieldType.Auto != fieldAnnotation.type()) {
xContentBuilder.field(FIELD_TYPE, fieldAnnotation.type().name().toLowerCase());
if (FieldType.Date == fieldAnnotation.type() && DateFormat.none != fieldAnnotation.format()) {
xContentBuilder.field(FIELD_FORMAT, DateFormat.custom == fieldAnnotation.format()
? fieldAnnotation.pattern() : fieldAnnotation.format());
}
}
if (FieldIndex.not_analyzed == fieldAnnotation.index()) {
xContentBuilder.field(FIELD_INDEX, fieldAnnotation.index().name().toLowerCase());
}
if (isNotBlank(fieldAnnotation.searchAnalyzer())) {
xContentBuilder.field(FIELD_SEARCH_ANALYZER, fieldAnnotation.searchAnalyzer());
}
if (isNotBlank(fieldAnnotation.indexAnalyzer())) {
xContentBuilder.field(FIELD_INDEX_ANALYZER, fieldAnnotation.indexAnalyzer());
}
xContentBuilder.endObject();
}
/**
* Apply mapping for a single @Field annotation
*
* @param xContentBuilder
* @param field
* @param fieldAnnotation
* @throws IOException
*/
private static void addSingleFieldMapping(XContentBuilder xContentBuilder, java.lang.reflect.Field field,
Field fieldAnnotation) throws IOException {
xContentBuilder.startObject(field.getName());
xContentBuilder.field(FIELD_STORE, fieldAnnotation.store());
if (FieldType.Auto != fieldAnnotation.type()) {
xContentBuilder.field(FIELD_TYPE, fieldAnnotation.type().name().toLowerCase());
if (FieldType.Date == fieldAnnotation.type() && DateFormat.none != fieldAnnotation.format()) {
xContentBuilder.field(FIELD_FORMAT, DateFormat.custom == fieldAnnotation.format()
? fieldAnnotation.pattern() : fieldAnnotation.format());
}
}
if (FieldIndex.not_analyzed == fieldAnnotation.index()) {
xContentBuilder.field(FIELD_INDEX, fieldAnnotation.index().name().toLowerCase());
}
if (isNotBlank(fieldAnnotation.searchAnalyzer())) {
xContentBuilder.field(FIELD_SEARCH_ANALYZER, fieldAnnotation.searchAnalyzer());
}
if (isNotBlank(fieldAnnotation.indexAnalyzer())) {
xContentBuilder.field(FIELD_INDEX_ANALYZER, fieldAnnotation.indexAnalyzer());
}
xContentBuilder.endObject();
}
/**
* Apply mapping for a single nested @Field annotation
*
* @param builder
* @param field
* @param annotation
* @throws IOException
*/
private static void addNestedFieldMapping(XContentBuilder builder, java.lang.reflect.Field field,
NestedField annotation) throws IOException {
builder.startObject(field.getName() + "." + annotation.dotSuffix());
builder.field(FIELD_STORE, annotation.store());
if (FieldType.Auto != annotation.type()) {
builder.field(FIELD_TYPE, annotation.type().name().toLowerCase());
}
if (FieldIndex.not_analyzed == annotation.index()) {
builder.field(FIELD_INDEX, annotation.index().name().toLowerCase());
}
if (isNotBlank(annotation.searchAnalyzer())) {
builder.field(FIELD_SEARCH_ANALYZER, annotation.searchAnalyzer());
}
if (isNotBlank(annotation.indexAnalyzer())) {
builder.field(FIELD_INDEX_ANALYZER, annotation.indexAnalyzer());
}
builder.endObject();
}
/**
* Apply mapping for a single nested @Field annotation
*
* @param builder
* @param field
* @param annotation
* @throws IOException
*/
private static void addNestedFieldMapping(XContentBuilder builder, java.lang.reflect.Field field,
NestedField annotation) throws IOException {
builder.startObject(field.getName() + "." + annotation.dotSuffix());
builder.field(FIELD_STORE, annotation.store());
if (FieldType.Auto != annotation.type()) {
builder.field(FIELD_TYPE, annotation.type().name().toLowerCase());
}
if (FieldIndex.not_analyzed == annotation.index()) {
builder.field(FIELD_INDEX, annotation.index().name().toLowerCase());
}
if (isNotBlank(annotation.searchAnalyzer())) {
builder.field(FIELD_SEARCH_ANALYZER, annotation.searchAnalyzer());
}
if (isNotBlank(annotation.indexAnalyzer())) {
builder.field(FIELD_INDEX_ANALYZER, annotation.indexAnalyzer());
}
builder.endObject();
}
/**
* Multi field mappings for string type fields, support for sorts and facets
*
* @param builder
* @param field
* @param annotation
* @throws IOException
*/
private static void addMultiFieldMapping(XContentBuilder builder, java.lang.reflect.Field field,
MultiField annotation) throws IOException {
builder.startObject(field.getName());
builder.field(FIELD_TYPE, "multi_field");
builder.startObject("fields");
//add standard field
addSingleFieldMapping(builder, field, annotation.mainField());
for (NestedField nestedField : annotation.otherFields()) {
addNestedFieldMapping(builder, field, nestedField);
}
builder.endObject();
builder.endObject();
}
/**
* Multi field mappings for string type fields, support for sorts and facets
*
* @param builder
* @param field
* @param annotation
* @throws IOException
*/
private static void addMultiFieldMapping(XContentBuilder builder, java.lang.reflect.Field field,
MultiField annotation) throws IOException {
builder.startObject(field.getName());
builder.field(FIELD_TYPE, "multi_field");
builder.startObject("fields");
//add standard field
addSingleFieldMapping(builder, field, annotation.mainField());
for (NestedField nestedField : annotation.otherFields()) {
addNestedFieldMapping(builder, field, nestedField);
}
builder.endObject();
builder.endObject();
}
/**
* Facet field for string type, for other types we don't need it(long, int, double, float)
*
* @param builder
* @param field
* @param annotation
* @throws IOException
*/
private static void addFacetMapping(XContentBuilder builder, java.lang.reflect.Field field, Field annotation) throws IOException {
builder.startObject(FacetRequest.FIELD_UNTOUCHED)
.field(FIELD_TYPE, TYPE_VALUE_STRING)
.field(FIELD_INDEX, INDEX_VALUE_NOT_ANALYZED)
.field(FIELD_STORE, true);
builder.endObject();
}
/**
* Facet field for string type, for other types we don't need it(long, int, double, float)
*
* @param builder
* @param field
* @param annotation
* @throws IOException
*/
private static void addFacetMapping(XContentBuilder builder, java.lang.reflect.Field field, Field annotation) throws IOException {
builder.startObject(FacetRequest.FIELD_UNTOUCHED)
.field(FIELD_TYPE, TYPE_VALUE_STRING)
.field(FIELD_INDEX, INDEX_VALUE_NOT_ANALYZED)
.field(FIELD_STORE, true);
builder.endObject();
}
/**
* Sort field for string type, for other types we don't need it(long, int, double, float)
* value of the field should be converted to lowercase and not analysed
*
* @param builder
* @param field
* @param annotation
* @throws IOException
*/
private static void addSortMapping(XContentBuilder builder, java.lang.reflect.Field field, Field annotation) throws IOException {
builder.startObject(FacetRequest.FIELD_SORT)
.field(FIELD_TYPE, TYPE_VALUE_STRING)
.field(FIELD_INDEX, "keyword")
.field(FIELD_STORE, true);
builder.endObject();
}
/**
* Sort field for string type, for other types we don't need it(long, int, double, float)
* value of the field should be converted to lowercase and not analysed
*
* @param builder
* @param field
* @param annotation
* @throws IOException
*/
private static void addSortMapping(XContentBuilder builder, java.lang.reflect.Field field, Field annotation) throws IOException {
builder.startObject(FacetRequest.FIELD_SORT)
.field(FIELD_TYPE, TYPE_VALUE_STRING)
.field(FIELD_INDEX, "keyword")
.field(FIELD_STORE, true);
builder.endObject();
}
protected static boolean isEntity(java.lang.reflect.Field field) {
TypeInformation typeInformation = ClassTypeInformation.from(field.getType());
Class<?> clazz = getFieldType(field);
boolean isComplexType = !SIMPLE_TYPE_HOLDER.isSimpleType(clazz);
return isComplexType && !Map.class.isAssignableFrom(typeInformation.getType());
}
protected static boolean isEntity(java.lang.reflect.Field field) {
TypeInformation typeInformation = ClassTypeInformation.from(field.getType());
Class<?> clazz = getFieldType(field);
boolean isComplexType = !SIMPLE_TYPE_HOLDER.isSimpleType(clazz);
return isComplexType && !Map.class.isAssignableFrom(typeInformation.getType());
}
protected static Class<?> getFieldType(java.lang.reflect.Field field) {
Class<?> clazz = field.getType();
TypeInformation typeInformation = ClassTypeInformation.from(clazz);
if (typeInformation.isCollectionLike()) {
clazz = GenericCollectionTypeResolver.getCollectionFieldType(field) != null ? GenericCollectionTypeResolver.getCollectionFieldType(field) : typeInformation.getComponentType().getType();
}
return clazz;
}
protected static Class<?> getFieldType(java.lang.reflect.Field field) {
Class<?> clazz = field.getType();
TypeInformation typeInformation = ClassTypeInformation.from(clazz);
if (typeInformation.isCollectionLike()) {
clazz = GenericCollectionTypeResolver.getCollectionFieldType(field) != null ? GenericCollectionTypeResolver.getCollectionFieldType(field) : typeInformation.getComponentType().getType();
}
return clazz;
}
private static boolean isAnyPropertyAnnotatedAsField(java.lang.reflect.Field[] fields) {
if (fields != null) {
for (java.lang.reflect.Field field : fields) {
if (field.isAnnotationPresent(Field.class)) {
return true;
}
}
}
return false;
}
private static boolean isAnyPropertyAnnotatedAsField(java.lang.reflect.Field[] fields) {
if (fields != null) {
for (java.lang.reflect.Field field : fields) {
if (field.isAnnotationPresent(Field.class)) {
return true;
}
}
}
return false;
}
private static boolean isIdField(java.lang.reflect.Field field, String idFieldName) {
return idFieldName.equals(field.getName());
}
private static boolean isIdField(java.lang.reflect.Field field, String idFieldName) {
return idFieldName.equals(field.getName());
}
private static boolean isInIgnoreFields(java.lang.reflect.Field field) {
Field fieldAnnotation = field.getAnnotation(Field.class);
if ( null != fieldAnnotation ) {
String [] ignoreFields = fieldAnnotation.ignoreFields();
return Arrays.asList(ignoreFields).contains(field.getName());
}
return false;
}
private static boolean isInIgnoreFields(java.lang.reflect.Field field) {
Field fieldAnnotation = field.getAnnotation(Field.class);
if (null != fieldAnnotation) {
String[] ignoreFields = fieldAnnotation.ignoreFields();
return Arrays.asList(ignoreFields).contains(field.getName());
}
return false;
}
private static boolean isNestedField(java.lang.reflect.Field field) {
Field fieldAnnotation = field.getAnnotation(Field.class);
return fieldAnnotation != null && (FieldType.Nested == fieldAnnotation.type() || FieldType.Object == fieldAnnotation.type());
}
private static boolean isGeoField(java.lang.reflect.Field field) {
return field.getType() == GeoPoint.class || field.getAnnotation(GeoPointField.class) != null;
}
private static boolean isNestedField(java.lang.reflect.Field field) {
Field fieldAnnotation = field.getAnnotation(Field.class);
return fieldAnnotation != null && (FieldType.Nested == fieldAnnotation.type() || FieldType.Object == fieldAnnotation.type());
}
private static boolean isGeoField(java.lang.reflect.Field field) {
return field.getType() == GeoPoint.class || field.getAnnotation(GeoPointField.class) != null;
}
}
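To make the branches above concrete, here is a hypothetical entity using the annotations that MappingBuilder inspects (@Field, @MultiField with @NestedField, @GeoPointField). Index, type, field names and the chosen field types are invented for the example.

```java
@Document(indexName = "library", type = "book")
public class Book {

    @Id
    private String id;                       // document id (Elasticsearch ids are strings)

    @Field(type = FieldType.String, index = FieldIndex.not_analyzed, store = true)
    private String isbn;                     // single @Field -> addSingleFieldMapping

    @MultiField(mainField = @Field(type = FieldType.String),
            otherFields = { @NestedField(dotSuffix = "untouched", type = FieldType.String,
                    index = FieldIndex.not_analyzed, store = true) })
    private String title;                    // multi_field -> addMultiFieldMapping + addNestedFieldMapping

    @GeoPointField
    private GeoPoint location;               // -> applyGeoPointFieldMapping (geo_point)

    // getters and setters omitted
}
```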

View File

@ -25,6 +25,5 @@ package org.springframework.data.elasticsearch.core;
public interface ResultsMapper extends SearchResultMapper, GetResultMapper {
EntityMapper getEntityMapper();
EntityMapper getEntityMapper();
}

View File

@ -23,6 +23,5 @@ import org.springframework.data.domain.Pageable;
*/
public interface SearchResultMapper {
<T> FacetedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable);
<T> FacetedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable);
}
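A sketch of a custom SearchResultMapper. The Jackson-based conversion, the class name and the error handling are assumptions; the mapResults signature and the FacetedPageImpl constructor used at the end are taken from this commit.

```java
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.SearchHit;
import org.springframework.data.domain.Pageable;

import com.fasterxml.jackson.databind.ObjectMapper;

public class SimpleSearchResultMapper implements SearchResultMapper {

    private final ObjectMapper objectMapper = new ObjectMapper();

    @Override
    public <T> FacetedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable) {
        List<T> results = new ArrayList<T>();
        for (SearchHit hit : response.getHits()) {
            if (hit.getSourceAsString() != null) {
                try {
                    // convert the raw _source of every hit into the requested type
                    results.add(objectMapper.readValue(hit.getSourceAsString(), clazz));
                } catch (IOException e) {
                    throw new IllegalStateException("Could not map search hit", e);
                }
            }
        }
        return new FacetedPageImpl<T>(results, pageable, response.getHits().getTotalHits());
    }
}
```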

View File

@ -15,6 +15,8 @@
*/
package org.springframework.data.elasticsearch.core.convert;
import java.util.Date;
import org.joda.time.DateTimeZone;
import org.joda.time.LocalDateTime;
import org.joda.time.ReadableInstant;
@ -22,11 +24,9 @@ import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.springframework.core.convert.converter.Converter;
import java.util.Date;
/**
* DateTimeConverters
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -74,5 +74,4 @@ public final class DateTimeConverters {
}
}
}

View File

@ -22,7 +22,7 @@ import org.springframework.data.mapping.context.MappingContext;
/**
* ElasticsearchConverter
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -31,16 +31,15 @@ public interface ElasticsearchConverter {
/**
* Returns the underlying {@link org.springframework.data.mapping.context.MappingContext} used by the converter.
*
*
* @return never {@literal null}
*/
MappingContext<? extends ElasticsearchPersistentEntity<?>, ElasticsearchPersistentProperty> getMappingContext();
/**
* Returns the underlying {@link org.springframework.core.convert.ConversionService} used by the converter.
*
*
* @return never {@literal null}.
*/
ConversionService getConversionService();
}

View File

@ -28,7 +28,7 @@ import org.springframework.util.Assert;
/**
* MappingElasticsearchConverter
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -62,5 +62,4 @@ public class MappingElasticsearchConverter implements ElasticsearchConverter, Ap
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
}
}

View File

@ -22,24 +22,24 @@ import org.springframework.util.Assert;
*/
public abstract class AbstractFacetRequest implements FacetRequest {
private String name;
private boolean applyQueryFilter;
private String name;
private boolean applyQueryFilter;
public AbstractFacetRequest(String name) {
Assert.hasText(name, "Facet can't be null or empty !!!");
this.name = name;
}
public AbstractFacetRequest(String name) {
Assert.hasText(name, "Facet can't be null or empty !!!");
this.name = name;
}
protected String getName(){
return name;
}
protected String getName() {
return name;
}
public void setApplyQueryFilter(boolean applyQueryFilter) {
this.applyQueryFilter = applyQueryFilter;
}
public void setApplyQueryFilter(boolean applyQueryFilter) {
this.applyQueryFilter = applyQueryFilter;
}
@Override
public boolean applyQueryFilter() {
return applyQueryFilter;
}
@Override
public boolean applyQueryFilter() {
return applyQueryFilter;
}
}

View File

@ -23,24 +23,24 @@ import org.springframework.util.Assert;
* @author Artur Konczak
* @author Jonathan Yan
*/
public class AbstactFacetResult implements FacetResult {
public class AbstractFacetResult implements FacetResult {
private final String name;
private final FacetType type;
private final String name;
private final FacetType type;
protected AbstactFacetResult(String name, FacetType type) {
Assert.hasText(name, "Facet name can't be null and should have a value");
this.name = name;
this.type = type;
}
protected AbstractFacetResult(String name, FacetType type) {
Assert.hasText(name, "Facet name can't be null and should have a value");
this.name = name;
this.type = type;
}
@Override
public String getName() {
return name;
}
@Override
public String getName() {
return name;
}
@Override
public FacetType getType() {
return type;
}
@Override
public FacetType getType() {
return type;
}
}

View File

@ -15,6 +15,9 @@
*/
package org.springframework.data.elasticsearch.core.facet;
import java.util.ArrayList;
import java.util.List;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.histogram.HistogramFacet;
import org.elasticsearch.search.facet.range.RangeFacet;
@ -22,61 +25,57 @@ import org.elasticsearch.search.facet.statistical.StatisticalFacet;
import org.elasticsearch.search.facet.terms.TermsFacet;
import org.springframework.data.elasticsearch.core.facet.result.*;
import java.util.ArrayList;
import java.util.List;
/**
* @author Artur Konczak
* @author Petar Tahchiev
*/
public class DefaultFacetMapper {
public static FacetResult parse(Facet facet) {
if (facet instanceof TermsFacet) {
return parseTerm((TermsFacet) facet);
}
public static FacetResult parse(Facet facet) {
if (facet instanceof TermsFacet) {
return parseTerm((TermsFacet) facet);
}
if (facet instanceof RangeFacet) {
return parseRange((RangeFacet) facet);
}
if (facet instanceof RangeFacet) {
return parseRange((RangeFacet) facet);
}
if (facet instanceof StatisticalFacet) {
return parseStatistical((StatisticalFacet) facet);
}
if (facet instanceof StatisticalFacet) {
return parseStatistical((StatisticalFacet) facet);
}
if (facet instanceof HistogramFacet) {
return parseHistogram((HistogramFacet) facet);
}
if (facet instanceof HistogramFacet) {
return parseHistogram((HistogramFacet) facet);
}
return null;
}
return null;
}
private static FacetResult parseTerm(TermsFacet facet) {
List<Term> entries = new ArrayList<Term>();
for (TermsFacet.Entry entry : facet.getEntries()) {
entries.add(new Term(entry.getTerm().toString(), entry.getCount()));
}
return new TermResult(facet.getName(), entries);
}
private static FacetResult parseTerm(TermsFacet facet) {
List<Term> entries = new ArrayList<Term>();
for (TermsFacet.Entry entry : facet.getEntries()) {
entries.add(new Term(entry.getTerm().toString(), entry.getCount()));
}
return new TermResult(facet.getName(), entries);
}
private static FacetResult parseRange(RangeFacet facet) {
List<Range> entries = new ArrayList<Range>();
for (RangeFacet.Entry entry : facet.getEntries()) {
entries.add(new Range(entry.getFrom() == Double.NEGATIVE_INFINITY ? null : entry.getFrom(), entry.getTo() == Double.POSITIVE_INFINITY ? null : entry.getTo(), entry.getCount(), entry.getTotal(), entry.getTotalCount(), entry.getMin(), entry.getMax()));
}
return new RangeResult(facet.getName(), entries);
}
private static FacetResult parseRange(RangeFacet facet) {
List<Range> entries = new ArrayList<Range>();
for (RangeFacet.Entry entry : facet.getEntries()) {
entries.add(new Range(entry.getFrom() == Double.NEGATIVE_INFINITY ? null : entry.getFrom(), entry.getTo() == Double.POSITIVE_INFINITY ? null : entry.getTo(), entry.getCount(), entry.getTotal(), entry.getTotalCount(), entry.getMin(), entry.getMax()));
}
return new RangeResult(facet.getName(), entries);
}
private static FacetResult parseStatistical(StatisticalFacet facet) {
return new StatisticalResult(facet.getName(), facet.getCount(), facet.getMax(), facet.getMin(), facet.getMean(), facet.getStdDeviation(), facet.getSumOfSquares(), facet.getTotal(), facet.getVariance());
}
private static FacetResult parseHistogram(HistogramFacet facet) {
List<IntervalUnit> entries = new ArrayList<IntervalUnit>();
for (HistogramFacet.Entry entry : facet.getEntries()) {
entries.add(new IntervalUnit(entry.getKey(), entry.getCount(), entry.getTotalCount(), entry.getTotal(), entry.getMean(), entry.getMin(), entry.getMax()));
}
return new HistogramResult(facet.getName(), entries);
}
private static FacetResult parseStatistical(StatisticalFacet facet) {
return new StatisticalResult(facet.getName(), facet.getCount(), facet.getMax(), facet.getMin(), facet.getMean(), facet.getStdDeviation(), facet.getSumOfSquares(), facet.getTotal(), facet.getVariance());
}
private static FacetResult parseHistogram(HistogramFacet facet) {
List<IntervalUnit> entries = new ArrayList<IntervalUnit>();
for (HistogramFacet.Entry entry : facet.getEntries()) {
entries.add(new IntervalUnit(entry.getKey(), entry.getCount(), entry.getTotalCount(), entry.getTotal(), entry.getMean(), entry.getMin(), entry.getMax()));
}
return new HistogramResult(facet.getName(), entries);
}
}

View File

@ -22,11 +22,10 @@ import org.elasticsearch.search.facet.FacetBuilder;
*/
public interface FacetRequest {
public static final String FIELD_UNTOUCHED = "untouched";
public static final String FIELD_SORT = "sort";
public static final String FIELD_UNTOUCHED = "untouched";
public static final String FIELD_SORT = "sort";
FacetBuilder getFacet();
boolean applyQueryFilter();
FacetBuilder getFacet();
boolean applyQueryFilter();
}

View File

@ -22,12 +22,10 @@ package org.springframework.data.elasticsearch.core.facet;
* @author Mohsin Husen
* @author Artur Konczak
* @author Jonathan Yan
*
*/
public interface FacetResult {
String getName();
FacetType getType();
String getName();
FacetType getType();
}

View File

@ -21,6 +21,6 @@ package org.springframework.data.elasticsearch.core.facet;
*/
public enum FacetType {
term, range, histogram, statistical
term, range, histogram, statistical
}

View File

@ -15,6 +15,8 @@
*/
package org.springframework.data.elasticsearch.core.facet.request;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang.StringUtils;
import org.elasticsearch.search.facet.FacetBuilder;
import org.elasticsearch.search.facet.FacetBuilders;
@ -22,48 +24,46 @@ import org.elasticsearch.search.facet.histogram.HistogramFacetBuilder;
import org.springframework.data.elasticsearch.core.facet.AbstractFacetRequest;
import org.springframework.util.Assert;
import java.util.concurrent.TimeUnit;
/**
* @author Artur Konczak
* @author Mohsin Husen
*/
public class HistogramFacetRequest extends AbstractFacetRequest {
private String field;
private long interval;
private TimeUnit timeUnit;
private String field;
private long interval;
private TimeUnit timeUnit;
public HistogramFacetRequest(String name) {
super(name);
}
public HistogramFacetRequest(String name) {
super(name);
}
public void setField(String field) {
this.field = field;
}
public void setField(String field) {
this.field = field;
}
public void setInterval(long interval) {
this.interval = interval;
}
public void setInterval(long interval) {
this.interval = interval;
}
public void setTimeUnit(TimeUnit timeUnit) {
this.timeUnit = timeUnit;
}
public void setTimeUnit(TimeUnit timeUnit) {
this.timeUnit = timeUnit;
}
public FacetBuilder getFacet() {
Assert.notNull(getName(), "Facet name can't be a null !!!");
Assert.isTrue(StringUtils.isNotBlank(field), "Please select field on which to build the facet !!!");
Assert.isTrue(interval > 0, "Please provide interval as positive value greater than zero !!!");
public FacetBuilder getFacet() {
Assert.notNull(getName(), "Facet name can't be a null !!!");
Assert.isTrue(StringUtils.isNotBlank(field), "Please select field on which to build the facet !!!");
Assert.isTrue(interval > 0, "Please provide interval as positive value greater than zero !!!");
HistogramFacetBuilder builder = FacetBuilders.histogramFacet(getName());
builder.field(field);
HistogramFacetBuilder builder = FacetBuilders.histogramFacet(getName());
builder.field(field);
if (timeUnit != null) {
builder.interval(interval, timeUnit);
} else {
builder.interval(interval);
}
if (timeUnit != null) {
builder.interval(interval, timeUnit);
} else {
builder.interval(interval);
}
return builder;
}
return builder;
}
}

View File

@ -15,42 +15,42 @@
*/
package org.springframework.data.elasticsearch.core.facet.request;
import org.springframework.data.elasticsearch.core.facet.FacetRequest;
import java.util.concurrent.TimeUnit;
import org.springframework.data.elasticsearch.core.facet.FacetRequest;
/**
* @author Artur Konczak
*/
public class HistogramFacetRequestBuilder {
HistogramFacetRequest result;
HistogramFacetRequest result;
public HistogramFacetRequestBuilder(String name) {
result = new HistogramFacetRequest(name);
}
public HistogramFacetRequestBuilder(String name) {
result = new HistogramFacetRequest(name);
}
public HistogramFacetRequestBuilder field(String field) {
result.setField(field);
return this;
}
public HistogramFacetRequestBuilder field(String field) {
result.setField(field);
return this;
}
public HistogramFacetRequestBuilder interval(long interval) {
result.setInterval(interval);
return this;
}
public HistogramFacetRequestBuilder interval(long interval) {
result.setInterval(interval);
return this;
}
public HistogramFacetRequestBuilder timeUnit(TimeUnit timeUnit) {
result.setTimeUnit(timeUnit);
return this;
}
public HistogramFacetRequestBuilder timeUnit(TimeUnit timeUnit) {
result.setTimeUnit(timeUnit);
return this;
}
public FacetRequest build() {
return result;
}
public FacetRequest build() {
return result;
}
public HistogramFacetRequestBuilder applyQueryFilter() {
result.setApplyQueryFilter(true);
return this;
}
public HistogramFacetRequestBuilder applyQueryFilter() {
result.setApplyQueryFilter(true);
return this;
}
}
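A usage sketch for the builder above. The facet name and the publishedDate field are invented, and wiring the request into a query through NativeSearchQueryBuilder.withFacet(..) is an assumption about the surrounding query API.

```java
FacetRequest publishedPerDay = new HistogramFacetRequestBuilder("publishedPerDay")
        .field("publishedDate")      // assumed date/numeric field on the entity
        .timeUnit(TimeUnit.DAYS)     // bucket entries per day
        .interval(1)
        .build();

SearchQuery searchQuery = new NativeSearchQueryBuilder()
        .withQuery(QueryBuilders.matchAllQuery())
        .withFacet(publishedPerDay)  // assumed wiring of the facet request
        .build();
```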

View File

@ -24,25 +24,25 @@ import org.springframework.data.elasticsearch.core.facet.FacetRequest;
*/
public class NativeFacetRequest implements FacetRequest {
private FacetBuilder facet;
private boolean applyQueryFilter;
private FacetBuilder facet;
private boolean applyQueryFilter;
public NativeFacetRequest(FacetBuilder facet) {
this(facet, false);
}
public NativeFacetRequest(FacetBuilder facet) {
this(facet, false);
}
public NativeFacetRequest(FacetBuilder facet, boolean applyQueryFilter) {
this.facet = facet;
this.applyQueryFilter = applyQueryFilter;
}
public NativeFacetRequest(FacetBuilder facet, boolean applyQueryFilter) {
this.facet = facet;
this.applyQueryFilter = applyQueryFilter;
}
@Override
public FacetBuilder getFacet() {
return facet;
}
@Override
public FacetBuilder getFacet() {
return facet;
}
@Override
public boolean applyQueryFilter() {
return applyQueryFilter;
}
@Override
public boolean applyQueryFilter() {
return applyQueryFilter;
}
}

View File

@ -15,6 +15,9 @@
*/
package org.springframework.data.elasticsearch.core.facet.request;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.elasticsearch.search.facet.FacetBuilder;
import org.elasticsearch.search.facet.FacetBuilders;
@ -22,9 +25,6 @@ import org.elasticsearch.search.facet.range.RangeFacetBuilder;
import org.springframework.data.elasticsearch.core.facet.AbstractFacetRequest;
import org.springframework.util.Assert;
import java.util.ArrayList;
import java.util.List;
/**
* Range facet for numeric fields
*
@ -32,56 +32,56 @@ import java.util.List;
*/
public class RangeFacetRequest extends AbstractFacetRequest {
private String field;
private String keyField;
private String valueField;
private String field;
private String keyField;
private String valueField;
private List<Double> from = new ArrayList<Double>();
private List<Double> to = new ArrayList<Double>();
private List<Double> from = new ArrayList<Double>();
private List<Double> to = new ArrayList<Double>();
public RangeFacetRequest(String name) {
super(name);
}
public RangeFacetRequest(String name) {
super(name);
}
public void setField(String field) {
this.field = field;
}
public void setField(String field) {
this.field = field;
}
public void setFields(String keyField, String valueField) {
this.keyField = keyField;
this.valueField = valueField;
}
public void setFields(String keyField, String valueField) {
this.keyField = keyField;
this.valueField = valueField;
}
public void range(Double from, Double to) {
if (from == null) {
this.from.add(Double.NEGATIVE_INFINITY);
} else {
this.from.add(from);
}
public void range(Double from, Double to) {
if (from == null) {
this.from.add(Double.NEGATIVE_INFINITY);
} else {
this.from.add(from);
}
if (to == null) {
this.to.add(Double.POSITIVE_INFINITY);
} else {
this.to.add(to);
}
}
if (to == null) {
this.to.add(Double.POSITIVE_INFINITY);
} else {
this.to.add(to);
}
}
@Override
public FacetBuilder getFacet() {
Assert.notNull(getName(), "Facet name can't be a null !!!");
Assert.isTrue(StringUtils.isNotBlank(field) || StringUtils.isNotBlank(keyField) && StringUtils.isNotBlank(valueField), "Please select field or key field and value field !!!");
@Override
public FacetBuilder getFacet() {
Assert.notNull(getName(), "Facet name can't be a null !!!");
Assert.isTrue(StringUtils.isNotBlank(field) || StringUtils.isNotBlank(keyField) && StringUtils.isNotBlank(valueField), "Please select field or key field and value field !!!");
RangeFacetBuilder builder = FacetBuilders.rangeFacet(getName());
if (StringUtils.isNotBlank(keyField)) {
builder.keyField(keyField).valueField(valueField);
} else {
builder.field(field);
}
Assert.notEmpty(from, "Please select at least one range");
Assert.notEmpty(to, "Please select at least one range");
for (int i = 0; i < from.size(); i++) {
builder.addRange(from.get(i), to.get(i));
}
return builder;
}
RangeFacetBuilder builder = FacetBuilders.rangeFacet(getName());
if (StringUtils.isNotBlank(keyField)) {
builder.keyField(keyField).valueField(valueField);
} else {
builder.field(field);
}
Assert.notEmpty(from, "Please select at least one range");
Assert.notEmpty(to, "Please select at least one range");
for (int i = 0; i < from.size(); i++) {
builder.addRange(from.get(i), to.get(i));
}
return builder;
}
}

View File

@ -24,44 +24,44 @@ import org.springframework.data.elasticsearch.core.facet.FacetRequest;
*/
public class RangeFacetRequestBuilder {
RangeFacetRequest result;
RangeFacetRequest result;
public RangeFacetRequestBuilder(String name) {
result = new RangeFacetRequest(name);
}
public RangeFacetRequestBuilder(String name) {
result = new RangeFacetRequest(name);
}
public RangeFacetRequestBuilder field(String field) {
result.setField(field);
return this;
}
public RangeFacetRequestBuilder field(String field) {
result.setField(field);
return this;
}
public RangeFacetRequestBuilder fields(String keyField, String valueField) {
result.setFields(keyField, valueField);
return this;
}
public RangeFacetRequestBuilder fields(String keyField, String valueField) {
result.setFields(keyField, valueField);
return this;
}
public RangeFacetRequestBuilder range(double from, double to) {
result.range(from, to);
return this;
}
public RangeFacetRequestBuilder range(double from, double to) {
result.range(from, to);
return this;
}
public RangeFacetRequestBuilder from(double from) {
result.range(from, null);
return this;
}
public RangeFacetRequestBuilder from(double from) {
result.range(from, null);
return this;
}
public RangeFacetRequestBuilder to(double to) {
result.range(null, to);
return this;
}
public RangeFacetRequestBuilder to(double to) {
result.range(null, to);
return this;
}
public RangeFacetRequestBuilder applyQueryFilter() {
result.setApplyQueryFilter(true);
return this;
}
public RangeFacetRequestBuilder applyQueryFilter() {
result.setApplyQueryFilter(true);
return this;
}
public FacetRequest build() {
return result;
}
public FacetRequest build() {
return result;
}
}
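A short sketch of how from(..), to(..) and range(..) combine into open-ended and closed buckets; the facet name and the price field are assumptions.

```java
FacetRequest priceRanges = new RangeFacetRequestBuilder("priceRanges")
        .field("price")        // assumed numeric field
        .to(10)                // bucket: anything below 10
        .range(10, 100)        // bucket: from 10 to 100
        .from(100)             // bucket: 100 and above
        .build();
```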

View File

@ -28,33 +28,33 @@ import org.springframework.util.Assert;
*/
public class StatisticalFacetRequest extends AbstractFacetRequest {
private String field;
private String field;
private String[] fields;
private String[] fields;
public StatisticalFacetRequest(String name) {
super(name);
}
public StatisticalFacetRequest(String name) {
super(name);
}
public void setField(String field) {
this.field = field;
}
public void setField(String field) {
this.field = field;
}
public void setFields(String... fields) {
this.fields = fields;
}
public void setFields(String... fields) {
this.fields = fields;
}
public FacetBuilder getFacet() {
Assert.notNull(getName(), "Facet name can't be a null !!!");
Assert.isTrue(StringUtils.isNotBlank(field) && fields == null, "Please select field or fields on which to build the facets !!!");
public FacetBuilder getFacet() {
Assert.notNull(getName(), "Facet name can't be a null !!!");
Assert.isTrue(StringUtils.isNotBlank(field) && fields == null, "Please select field or fields on which to build the facets !!!");
StatisticalFacetBuilder builder = FacetBuilders.statisticalFacet(getName());
if (ArrayUtils.isNotEmpty(fields)) {
builder.fields(fields);
} else {
builder.field(field);
}
StatisticalFacetBuilder builder = FacetBuilders.statisticalFacet(getName());
if (ArrayUtils.isNotEmpty(fields)) {
builder.fields(fields);
} else {
builder.field(field);
}
return builder;
}
return builder;
}
}

View File

@ -22,28 +22,28 @@ import org.springframework.data.elasticsearch.core.facet.FacetRequest;
*/
public class StatisticalFacetRequestBuilder {
StatisticalFacetRequest result;
StatisticalFacetRequest result;
public StatisticalFacetRequestBuilder(String name) {
result = new StatisticalFacetRequest(name);
}
public StatisticalFacetRequestBuilder(String name) {
result = new StatisticalFacetRequest(name);
}
public StatisticalFacetRequestBuilder field(String field) {
result.setField(field);
return this;
}
public StatisticalFacetRequestBuilder field(String field) {
result.setField(field);
return this;
}
public StatisticalFacetRequestBuilder fields(String... fields) {
result.setFields(fields);
return this;
}
public StatisticalFacetRequestBuilder fields(String... fields) {
result.setFields(fields);
return this;
}
public StatisticalFacetRequestBuilder applyQueryFilter() {
result.setApplyQueryFilter(true);
return this;
}
public StatisticalFacetRequestBuilder applyQueryFilter() {
result.setApplyQueryFilter(true);
return this;
}
public FacetRequest build() {
return result;
}
public FacetRequest build() {
return result;
}
}

View File

@ -21,6 +21,6 @@ package org.springframework.data.elasticsearch.core.facet.request;
*/
public enum TermFacetOrder {
ascTerm, descTerm, ascCount, descCount;
ascTerm, descTerm, ascCount, descCount;
}

View File

@ -31,78 +31,78 @@ import org.springframework.util.Assert;
*/
public class TermFacetRequest extends AbstractFacetRequest {
private String[] fields;
private Object[] excludeTerms;
private int size = 10;
private TermFacetOrder order = TermFacetOrder.descCount;
private boolean allTerms = false;
private String regex = null;
private int regexFlag = 0;
private String[] fields;
private Object[] excludeTerms;
private int size = 10;
private TermFacetOrder order = TermFacetOrder.descCount;
private boolean allTerms = false;
private String regex = null;
private int regexFlag = 0;
public TermFacetRequest(String name) {
super(name);
}
public TermFacetRequest(String name) {
super(name);
}
public void setFields(String... fields) {
this.fields = fields;
}
public void setFields(String... fields) {
this.fields = fields;
}
public void setSize(int size) {
Assert.isTrue(size >= 0, "Size should be bigger than zero !!!");
this.size = size;
}
public void setSize(int size) {
Assert.isTrue(size >= 0, "Size should be bigger than zero !!!");
this.size = size;
}
public void setOrder(TermFacetOrder order) {
this.order = order;
}
public void setOrder(TermFacetOrder order) {
this.order = order;
}
public void setExcludeTerms(Object... excludeTerms) {
this.excludeTerms = excludeTerms;
}
public void setExcludeTerms(Object... excludeTerms) {
this.excludeTerms = excludeTerms;
}
public void setAllTerms(boolean allTerms) {
this.allTerms = allTerms;
}
public void setAllTerms(boolean allTerms) {
this.allTerms = allTerms;
}
public void setRegex(String regex) {
this.regex = regex;
}
public void setRegex(String regex) {
this.regex = regex;
}
public void setRegex(String regex, int regexFlag) {
this.regex = regex;
this.regexFlag = regexFlag;
}
public void setRegex(String regex, int regexFlag) {
this.regex = regex;
this.regexFlag = regexFlag;
}
@Override
public FacetBuilder getFacet() {
Assert.notEmpty(fields, "Please select at least one field !!!");
TermsFacetBuilder builder = FacetBuilders.termsFacet(getName()).fields(fields).size(size);
switch (order) {
@Override
public FacetBuilder getFacet() {
Assert.notEmpty(fields, "Please select at least one field !!!");
TermsFacetBuilder builder = FacetBuilders.termsFacet(getName()).fields(fields).size(size);
switch (order) {
case descTerm:
builder.order(TermsFacet.ComparatorType.REVERSE_TERM);
break;
case ascTerm:
builder.order(TermsFacet.ComparatorType.TERM);
break;
case ascCount:
builder.order(TermsFacet.ComparatorType.REVERSE_COUNT);
break;
default:
builder.order(TermsFacet.ComparatorType.COUNT);
}
if (ArrayUtils.isNotEmpty(excludeTerms)) {
builder.exclude(excludeTerms);
}
case descTerm:
builder.order(TermsFacet.ComparatorType.REVERSE_TERM);
break;
case ascTerm:
builder.order(TermsFacet.ComparatorType.TERM);
break;
case ascCount:
builder.order(TermsFacet.ComparatorType.REVERSE_COUNT);
break;
default:
builder.order(TermsFacet.ComparatorType.COUNT);
}
if (ArrayUtils.isNotEmpty(excludeTerms)) {
builder.exclude(excludeTerms);
}
if (allTerms) {
builder.allTerms(allTerms);
}
if (allTerms) {
builder.allTerms(allTerms);
}
if (StringUtils.isNotBlank(regex)) {
builder.regex(regex, regexFlag);
}
if (StringUtils.isNotBlank(regex)) {
builder.regex(regex, regexFlag);
}
return builder;
}
return builder;
}
}

View File

@ -24,68 +24,68 @@ import org.springframework.data.elasticsearch.core.facet.FacetRequest;
*/
public class TermFacetRequestBuilder {
private TermFacetRequest result;
private TermFacetRequest result;
public TermFacetRequestBuilder(String name) {
result = new TermFacetRequest(name);
}
public TermFacetRequestBuilder(String name) {
result = new TermFacetRequest(name);
}
public TermFacetRequestBuilder fields(String... fields) {
result.setFields(fields);
return this;
}
public TermFacetRequestBuilder fields(String... fields) {
result.setFields(fields);
return this;
}
public TermFacetRequestBuilder size(int size) {
result.setSize(size);
return this;
}
public TermFacetRequestBuilder size(int size) {
result.setSize(size);
return this;
}
public TermFacetRequestBuilder excludeTerms(Object... terms) {
result.setExcludeTerms(terms);
return this;
}
public TermFacetRequestBuilder excludeTerms(Object... terms) {
result.setExcludeTerms(terms);
return this;
}
public TermFacetRequestBuilder allTerms() {
result.setAllTerms(true);
return this;
}
public TermFacetRequestBuilder allTerms() {
result.setAllTerms(true);
return this;
}
public TermFacetRequestBuilder regex(String regex) {
result.setRegex(regex);
return this;
}
public TermFacetRequestBuilder regex(String regex) {
result.setRegex(regex);
return this;
}
public TermFacetRequestBuilder regex(String regex, int regexFlag) {
result.setRegex(regex, regexFlag);
return this;
}
public TermFacetRequestBuilder regex(String regex, int regexFlag) {
result.setRegex(regex, regexFlag);
return this;
}
public TermFacetRequestBuilder ascTerm() {
result.setOrder(TermFacetOrder.ascTerm);
return this;
}
public TermFacetRequestBuilder ascTerm() {
result.setOrder(TermFacetOrder.ascTerm);
return this;
}
public TermFacetRequestBuilder descTerm() {
result.setOrder(TermFacetOrder.descTerm);
return this;
}
public TermFacetRequestBuilder descTerm() {
result.setOrder(TermFacetOrder.descTerm);
return this;
}
public TermFacetRequestBuilder ascCount() {
result.setOrder(TermFacetOrder.ascCount);
return this;
}
public TermFacetRequestBuilder ascCount() {
result.setOrder(TermFacetOrder.ascCount);
return this;
}
public TermFacetRequestBuilder descCount() {
result.setOrder(TermFacetOrder.descCount);
return this;
}
public TermFacetRequestBuilder descCount() {
result.setOrder(TermFacetOrder.descCount);
return this;
}
public TermFacetRequestBuilder applyQueryFilter() {
result.setApplyQueryFilter(true);
return this;
}
public TermFacetRequestBuilder applyQueryFilter() {
result.setApplyQueryFilter(true);
return this;
}
public FacetRequest build() {
return result;
}
public FacetRequest build() {
return result;
}
}
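
To put the reformatted builder above in context, here is a hedged usage sketch: it assumes the `elasticsearchTemplate` and `SampleEntity` names used elsewhere in this project's examples, and that `queryForPage` returns the `FacetedPage` type referenced in this commit.

```java
// term facet on the "message" field, ordered by descending count (the default order)
SearchQuery searchQuery = new NativeSearchQueryBuilder()
		.withQuery(QueryBuilders.matchAllQuery())
		.withFacet(new TermFacetRequestBuilder("msgFacet").fields("message").descCount().build())
		.build();

FacetedPage<SampleEntity> page = elasticsearchTemplate.queryForPage(searchQuery, SampleEntity.class);
```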

View File

@ -15,25 +15,24 @@
*/
package org.springframework.data.elasticsearch.core.facet.result;
import org.springframework.data.elasticsearch.core.facet.AbstactFacetResult;
import org.springframework.data.elasticsearch.core.facet.FacetType;
import java.util.List;
import org.springframework.data.elasticsearch.core.facet.AbstractFacetResult;
import org.springframework.data.elasticsearch.core.facet.FacetType;
/**
* @author Artur Konczak
*/
public class HistogramResult extends AbstactFacetResult {
public class HistogramResult extends AbstractFacetResult {
private List<IntervalUnit> terms;
private List<IntervalUnit> terms;
public HistogramResult(String name, List<IntervalUnit> terms) {
super(name, FacetType.term);
this.terms = terms;
}
public List<IntervalUnit> getIntervalUnit() {
return terms;
}
public HistogramResult(String name, List<IntervalUnit> terms) {
super(name, FacetType.term);
this.terms = terms;
}
public List<IntervalUnit> getIntervalUnit() {
return terms;
}
}

View File

@ -28,64 +28,64 @@ import java.util.Date;
*/
public class IntervalUnit {
private static final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm");
private static final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm");
long key;
long count;
long totalCount;
double total;
double mean;
double min;
double max;
long key;
long count;
long totalCount;
double total;
double mean;
double min;
double max;
public IntervalUnit(long key, long count, long totalCount, double total, double mean, double min, double max) {
this.key = key;
this.count = count;
this.totalCount = totalCount;
this.total = total;
this.mean = mean;
this.min = min;
this.max = max;
}
public IntervalUnit(long key, long count, long totalCount, double total, double mean, double min, double max) {
this.key = key;
this.count = count;
this.totalCount = totalCount;
this.total = total;
this.mean = mean;
this.min = min;
this.max = max;
}
public long getKey() {
return key;
}
public long getKey() {
return key;
}
public long getCount() {
return count;
}
public long getCount() {
return count;
}
public long getTotalCount() {
return totalCount;
}
public long getTotalCount() {
return totalCount;
}
public double getTotal() {
return total;
}
public double getTotal() {
return total;
}
public double getMean() {
return mean;
}
public double getMean() {
return mean;
}
public double getMin() {
return min;
}
public double getMin() {
return min;
}
public double getMax() {
return max;
}
public double getMax() {
return max;
}
@Override
public String toString() {
return "IntervalUnit{" +
"key=" + format.format(new Date(key)) +
", count=" + count +
", totalCount=" + totalCount +
", total=" + total +
", mean=" + mean +
", min=" + min +
", max=" + max +
'}';
}
@Override
public String toString() {
return "IntervalUnit{" +
"key=" + format.format(new Date(key)) +
", count=" + count +
", totalCount=" + totalCount +
", total=" + total +
", mean=" + mean +
", min=" + min +
", max=" + max +
'}';
}
}

View File

@ -25,54 +25,54 @@ package org.springframework.data.elasticsearch.core.facet.result;
*/
public class Range {
private Double from;
private Double to;
private long count;
private double total;
private double totalCount;
private double min = Double.POSITIVE_INFINITY;
private double max = Double.NEGATIVE_INFINITY;
private Double from;
private Double to;
private long count;
private double total;
private double totalCount;
private double min = Double.POSITIVE_INFINITY;
private double max = Double.NEGATIVE_INFINITY;
public Range(Double from, Double to, long count, double total, double totalCount, double min, double max) {
this.from = from;
this.to = to;
this.count = count;
this.total = total;
this.totalCount = totalCount;
this.min = min;
this.max = max;
}
public Range(Double from, Double to, long count, double total, double totalCount, double min, double max) {
this.from = from;
this.to = to;
this.count = count;
this.total = total;
this.totalCount = totalCount;
this.min = min;
this.max = max;
}
public Double getFrom() {
return from;
}
public Double getFrom() {
return from;
}
public Double getTo() {
return to;
}
public Double getTo() {
return to;
}
/**
* Return number of documents in range
*
* @return
*/
public long getCount() {
return count;
}
/**
* Return number of documents in range
*
* @return
*/
public long getCount() {
return count;
}
public double getTotal() {
return total;
}
public double getTotal() {
return total;
}
public double getTotalCount() {
return totalCount;
}
public double getTotalCount() {
return totalCount;
}
public double getMin() {
return min;
}
public double getMin() {
return min;
}
public double getMax() {
return max;
}
public double getMax() {
return max;
}
}

View File

@ -15,11 +15,11 @@
*/
package org.springframework.data.elasticsearch.core.facet.result;
import org.springframework.data.elasticsearch.core.facet.AbstactFacetResult;
import org.springframework.data.elasticsearch.core.facet.FacetType;
import java.util.List;
import org.springframework.data.elasticsearch.core.facet.AbstractFacetResult;
import org.springframework.data.elasticsearch.core.facet.FacetType;
/**
* Basic term facet result
*
@ -28,17 +28,16 @@ import java.util.List;
* @author Artur Konczak
* @author Jonathan Yan
*/
public class RangeResult extends AbstactFacetResult {
public class RangeResult extends AbstractFacetResult {
private List<Range> ranges;
private List<Range> ranges;
public RangeResult(String name, List<Range> ranges) {
super(name, FacetType.range);
this.ranges = ranges;
}
public List<Range> getRanges() {
return ranges;
}
public RangeResult(String name, List<Range> ranges) {
super(name, FacetType.range);
this.ranges = ranges;
}
public List<Range> getRanges() {
return ranges;
}
}

View File

@ -15,71 +15,71 @@
*/
package org.springframework.data.elasticsearch.core.facet.result;
import org.springframework.data.elasticsearch.core.facet.AbstactFacetResult;
import org.springframework.data.elasticsearch.core.facet.AbstractFacetResult;
import org.springframework.data.elasticsearch.core.facet.FacetType;
/**
* @author Petar Tahchiev
*/
public class StatisticalResult extends AbstactFacetResult {
public class StatisticalResult extends AbstractFacetResult {
private long count;
private long count;
private double max;
private double max;
private double min;
private double min;
private double mean;
private double mean;
private double stdDeviation;
private double stdDeviation;
private double sumOfSquares;
private double sumOfSquares;
private double total;
private double total;
private double variance;
private double variance;
public StatisticalResult(String name, long count, double max, double min, double mean, double stdDeviation, double sumOfSquares, double total, double variance) {
super(name, FacetType.statistical);
this.count = count;
this.max = max;
this.min = min;
this.mean = mean;
this.stdDeviation = stdDeviation;
this.sumOfSquares = sumOfSquares;
this.total = total;
this.variance = variance;
}
public StatisticalResult(String name, long count, double max, double min, double mean, double stdDeviation, double sumOfSquares, double total, double variance) {
super(name, FacetType.statistical);
this.count = count;
this.max = max;
this.min = min;
this.mean = mean;
this.stdDeviation = stdDeviation;
this.sumOfSquares = sumOfSquares;
this.total = total;
this.variance = variance;
}
public long getCount() {
return count;
}
public long getCount() {
return count;
}
public double getMax() {
return max;
}
public double getMax() {
return max;
}
public double getMin() {
return min;
}
public double getMin() {
return min;
}
public double getMean() {
return mean;
}
public double getMean() {
return mean;
}
public double getStdDeviation() {
return stdDeviation;
}
public double getStdDeviation() {
return stdDeviation;
}
public double getSumOfSquares() {
return sumOfSquares;
}
public double getSumOfSquares() {
return sumOfSquares;
}
public double getTotal() {
return total;
}
public double getTotal() {
return total;
}
public double getVariance() {
return variance;
}
public double getVariance() {
return variance;
}
}

View File

@ -25,19 +25,19 @@ package org.springframework.data.elasticsearch.core.facet.result;
*/
public class Term {
private String term;
private int count;
private String term;
private int count;
public Term(String term, int count) {
this.term = term;
this.count = count;
}
public Term(String term, int count) {
this.term = term;
this.count = count;
}
public String getTerm() {
return term;
}
public String getTerm() {
return term;
}
public int getCount() {
return count;
}
public int getCount() {
return count;
}
}

View File

@ -15,11 +15,11 @@
*/
package org.springframework.data.elasticsearch.core.facet.result;
import org.springframework.data.elasticsearch.core.facet.AbstactFacetResult;
import org.springframework.data.elasticsearch.core.facet.FacetType;
import java.util.List;
import org.springframework.data.elasticsearch.core.facet.AbstractFacetResult;
import org.springframework.data.elasticsearch.core.facet.FacetType;
/**
* Basic term facet result
*
@ -28,17 +28,16 @@ import java.util.List;
* @author Artur Konczak
* @author Jonathan Yan
*/
public class TermResult extends AbstactFacetResult {
public class TermResult extends AbstractFacetResult {
private List<Term> terms;
private List<Term> terms;
public TermResult(String name, List<Term> terms) {
super(name, FacetType.term);
this.terms = terms;
}
public List<Term> getTerms() {
return terms;
}
public TermResult(String name, List<Term> terms) {
super(name, FacetType.term);
this.terms = terms;
}
public List<Term> getTerms() {
return terms;
}
}
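
A short sketch of reading this result type back from a faceted search; it assumes the `searchQuery` built in the earlier facet example and that `FacetedPage` exposes a `getFacet(String)` lookup by facet name, which is an assumption rather than something shown in this diff.

```java
FacetedPage<SampleEntity> page = elasticsearchTemplate.queryForPage(searchQuery, SampleEntity.class);
TermResult msgFacet = (TermResult) page.getFacet("msgFacet"); // assumed lookup by the name given in the request
for (Term term : msgFacet.getTerms()) {
	System.out.println(term.getTerm() + " -> " + term.getCount());
}
```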

View File

@ -22,20 +22,19 @@ package org.springframework.data.elasticsearch.core.geo;
*/
public class GeoBox {
private GeoPoint topLeft;
private GeoPoint bottomRight;
private GeoPoint topLeft;
private GeoPoint bottomRight;
public GeoBox(GeoPoint topLeft, GeoPoint bottomRight) {
this.topLeft = topLeft;
this.bottomRight = bottomRight;
}
public GeoBox(GeoPoint topLeft, GeoPoint bottomRight) {
this.topLeft = topLeft;
this.bottomRight = bottomRight;
}
public GeoPoint getTopLeft() {
return topLeft;
}
public GeoPoint getBottomRight() {
return bottomRight;
}
public GeoPoint getTopLeft() {
return topLeft;
}
public GeoPoint getBottomRight() {
return bottomRight;
}
}

View File

@ -22,24 +22,23 @@ package org.springframework.data.elasticsearch.core.geo;
*/
public class GeoPoint {
private double lat;
private double lon;
private double lat;
private double lon;
private GeoPoint() {
//required by mapper to instantiate object
}
private GeoPoint() {
//required by mapper to instantiate object
}
public GeoPoint(double latitude, double longitude) {
this.lat = latitude;
this.lon = longitude;
}
public GeoPoint(double latitude, double longitude) {
this.lat = latitude;
this.lon = longitude;
}
public double getLat() {
return lat;
}
public double getLon() {
return lon;
}
public double getLat() {
return lat;
}
public double getLon() {
return lon;
}
}
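
A minimal sketch of how this type is typically carried on a document; the `Author` class and its fields are hypothetical and only illustrate that the mapper needs the no-arg constructor above to rebuild the point.

```java
@Document(indexName = "authors", type = "author")
public class Author {

	@Id
	private String id;

	private String name;

	// written and read as {"lat": ..., "lon": ...}; populated via new GeoPoint(latitude, longitude)
	private GeoPoint location;

	// getters and setters omitted for brevity
}
```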

View File

@ -19,7 +19,7 @@ import org.springframework.data.mapping.PersistentEntity;
/**
* ElasticsearchPersistentEntity
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -39,8 +39,8 @@ public interface ElasticsearchPersistentEntity<T> extends PersistentEntity<T, El
String getIndexStoreType();
ElasticsearchPersistentProperty getVersionProperty();
String getParentType();
ElasticsearchPersistentProperty getParentIdProperty();
}

View File

@ -20,7 +20,7 @@ import org.springframework.data.mapping.PersistentProperty;
/**
* ElasticsearchPersistentProperty
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -37,5 +37,4 @@ public interface ElasticsearchPersistentProperty extends PersistentProperty<Elas
return source.getFieldName();
}
}
}

View File

@ -15,16 +15,16 @@
*/
package org.springframework.data.elasticsearch.core.mapping;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Field;
import org.springframework.data.mapping.context.AbstractMappingContext;
import org.springframework.data.mapping.model.SimpleTypeHolder;
import org.springframework.data.util.TypeInformation;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Field;
/**
* SimpleElasticsearchMappingContext
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -39,8 +39,7 @@ public class SimpleElasticsearchMappingContext extends
@Override
protected ElasticsearchPersistentProperty createPersistentProperty(Field field, PropertyDescriptor descriptor,
SimpleElasticsearchPersistentEntity<?> owner, SimpleTypeHolder simpleTypeHolder) {
SimpleElasticsearchPersistentEntity<?> owner, SimpleTypeHolder simpleTypeHolder) {
return new SimpleElasticsearchPersistentProperty(field, descriptor, owner, simpleTypeHolder);
}
}

View File

@ -15,6 +15,10 @@
*/
package org.springframework.data.elasticsearch.core.mapping;
import static org.springframework.util.StringUtils.*;
import java.util.Locale;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
@ -27,15 +31,10 @@ import org.springframework.data.util.TypeInformation;
import org.springframework.expression.spel.support.StandardEvaluationContext;
import org.springframework.util.Assert;
import java.util.Locale;
import static org.springframework.util.StringUtils.hasText;
/**
* Elasticsearch specific {@link org.springframework.data.mapping.PersistentEntity} implementation holding
*
*
* @param <T>
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -119,7 +118,7 @@ public class SimpleElasticsearchPersistentEntity<T> extends BasicPersistentEntit
@Override
public void addPersistentProperty(ElasticsearchPersistentProperty property) {
super.addPersistentProperty(property);
Parent parent = property.getField().getAnnotation(Parent.class);
if (parent != null) {
Assert.isNull(this.parentIdProperty, "Only one field can hold a @Parent annotation");
@ -128,7 +127,7 @@ public class SimpleElasticsearchPersistentEntity<T> extends BasicPersistentEntit
this.parentIdProperty = property;
this.parentType = parent.type();
}
if (property.isVersionProperty()) {
Assert.isTrue(property.getType() == Long.class, "Version property should be Long");
}
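
To illustrate the two assertions enforced above (a single `@Parent` field and a `Long` version property), a hypothetical child document could look like this sketch:

```java
@Document(indexName = "blog", type = "comment")
public class Comment {

	@Id
	private String id;

	@Parent(type = "post") // only one field may carry @Parent; its type() becomes the parent type
	private String postId;

	@Version
	private Long version; // the version property must be declared as Long

	private String text;
}
```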

View File

@ -15,19 +15,19 @@
*/
package org.springframework.data.elasticsearch.core.mapping;
import org.springframework.data.mapping.Association;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mapping.model.AnnotationBasedPersistentProperty;
import org.springframework.data.mapping.model.SimpleTypeHolder;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Field;
import java.util.HashSet;
import java.util.Set;
import org.springframework.data.mapping.Association;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mapping.model.AnnotationBasedPersistentProperty;
import org.springframework.data.mapping.model.SimpleTypeHolder;
/**
* Elasticsearch specific {@link org.springframework.data.mapping.PersistentProperty} implementation processing
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -44,7 +44,7 @@ public class SimpleElasticsearchPersistentProperty extends
}
public SimpleElasticsearchPersistentProperty(Field field, PropertyDescriptor propertyDescriptor,
PersistentEntity<?, ElasticsearchPersistentProperty> owner, SimpleTypeHolder simpleTypeHolder) {
PersistentEntity<?, ElasticsearchPersistentProperty> owner, SimpleTypeHolder simpleTypeHolder) {
super(field, propertyDescriptor, owner, simpleTypeHolder);
}
@ -62,5 +62,4 @@ public class SimpleElasticsearchPersistentProperty extends
protected Association<ElasticsearchPersistentProperty> createAssociation() {
return null;
}
}

View File

@ -15,18 +15,18 @@
*/
package org.springframework.data.elasticsearch.core.query;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.util.Assert;
import static org.apache.commons.collections.CollectionUtils.*;
import java.util.ArrayList;
import java.util.List;
import static org.apache.commons.collections.CollectionUtils.addAll;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.util.Assert;
/**
* AbstractQuery
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -37,7 +37,7 @@ abstract class AbstractQuery implements Query {
protected List<String> indices = new ArrayList<String>();
protected List<String> types = new ArrayList<String>();
protected List<String> fields = new ArrayList<String>();
protected float minScore;
protected float minScore;
@Override
public Sort getSort() {
@ -101,11 +101,11 @@ abstract class AbstractQuery implements Query {
return (T) this;
}
public float getMinScore() {
return minScore;
}
public float getMinScore() {
return minScore;
}
public void setMinScore(float minScore) {
this.minScore = minScore;
}
public void setMinScore(float minScore) {
this.minScore = minScore;
}
}

View File

@ -15,46 +15,46 @@
*/
package org.springframework.data.elasticsearch.core.query;
import java.util.Map;
import org.elasticsearch.index.query.FilterBuilder;
import java.util.Map;
/**
*
* @author Mohsin Husen
*/
public class AliasBuilder {
private String indexName;
private String aliasName;
private FilterBuilder filterBuilder;
private Map<String, Object> filter;
private String indexName;
private String aliasName;
private FilterBuilder filterBuilder;
private Map<String, Object> filter;
public AliasBuilder withIndexName(String indexName){
this.indexName = indexName;
return this;
}
public AliasBuilder withIndexName(String indexName) {
this.indexName = indexName;
return this;
}
public AliasBuilder withAliasName(String aliasName){
this.aliasName = aliasName;
return this;
}
public AliasBuilder withAliasName(String aliasName) {
this.aliasName = aliasName;
return this;
}
public AliasBuilder withFilterBuilder(FilterBuilder filterBuilder){
this.filterBuilder = filterBuilder;
return this;
}
public AliasBuilder withFilterBuilder(FilterBuilder filterBuilder) {
this.filterBuilder = filterBuilder;
return this;
}
public AliasBuilder withFilter(Map<String, Object> filter){
this.filter = filter;
return this;
}
public AliasBuilder withFilter(Map<String, Object> filter) {
this.filter = filter;
return this;
}
public AliasQuery build(){
AliasQuery aliasQuery = new AliasQuery();
aliasQuery.setIndexName(indexName);
aliasQuery.setAliasName(aliasName);
aliasQuery.setFilterBuilder(filterBuilder);
aliasQuery.setFilter(filter);
return aliasQuery;
}
public AliasQuery build() {
AliasQuery aliasQuery = new AliasQuery();
aliasQuery.setIndexName(indexName);
aliasQuery.setAliasName(aliasName);
aliasQuery.setFilterBuilder(filterBuilder);
aliasQuery.setFilter(filter);
return aliasQuery;
}
}
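
A hedged usage sketch of the builder above; the index and alias names are placeholders, and `addAlias(AliasQuery)` on the template is assumed to be the operation that consumes the resulting query.

```java
AliasQuery aliasQuery = new AliasBuilder()
		.withIndexName("transactions-2014-02")
		.withAliasName("transactions-current")
		.withFilterBuilder(FilterBuilders.termFilter("region", "emea"))
		.build();

elasticsearchTemplate.addAlias(aliasQuery); // assumed template operation registering the alias
```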

View File

@ -14,49 +14,52 @@
* limitations under the License.
*/
package org.springframework.data.elasticsearch.core.query;
import org.elasticsearch.index.query.FilterBuilder;
import java.util.Map;
import org.elasticsearch.index.query.FilterBuilder;
/**
* AliasQuery is useful for creating a new alias or deleting existing ones
*
* @author Mohsin Husen
*/
public class AliasQuery {
private String indexName;
private String aliasName;
private FilterBuilder filterBuilder;
private Map<String, Object> filter;
private String indexName;
private String aliasName;
private FilterBuilder filterBuilder;
private Map<String, Object> filter;
public String getIndexName() {
return indexName;
}
public String getIndexName() {
return indexName;
}
public void setIndexName(String indexName) {
this.indexName = indexName;
}
public void setIndexName(String indexName) {
this.indexName = indexName;
}
public String getAliasName() {
return aliasName;
}
public String getAliasName() {
return aliasName;
}
public void setAliasName(String aliasName) {
this.aliasName = aliasName;
}
public void setAliasName(String aliasName) {
this.aliasName = aliasName;
}
public FilterBuilder getFilterBuilder() {
return filterBuilder;
}
public FilterBuilder getFilterBuilder() {
return filterBuilder;
}
public void setFilterBuilder(FilterBuilder filterBuilder) {
this.filterBuilder = filterBuilder;
}
public void setFilterBuilder(FilterBuilder filterBuilder) {
this.filterBuilder = filterBuilder;
}
public Map<String, Object> getFilter() {
return filter;
}
public Map<String, Object> getFilter() {
return filter;
}
public void setFilter(Map<String, Object> filter) {
this.filter = filter;
}
public void setFilter(Map<String, Object> filter) {
this.filter = filter;
}
}

View File

@ -20,7 +20,7 @@ import org.springframework.util.Assert;
/**
* CriteriaQuery
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -34,48 +34,47 @@ public class CriteriaQuery extends AbstractQuery {
public CriteriaQuery(Criteria criteria) {
this(criteria, null);
}
public CriteriaQuery(Criteria criteria, Pageable pageable) {
this.criteria = criteria;
this.pageable = pageable;
if (pageable != null) {
this.addSort(pageable.getSort());
}
}
public static final Query fromQuery(CriteriaQuery source) {
return fromQuery(source, new CriteriaQuery());
}
public CriteriaQuery(Criteria criteria, Pageable pageable) {
this.criteria = criteria;
this.pageable = pageable;
if (pageable != null) {
this.addSort(pageable.getSort());
}
}
public static <T extends CriteriaQuery> T fromQuery(CriteriaQuery source, T destination) {
if (source == null || destination == null) {
return null;
}
public static final Query fromQuery(CriteriaQuery source) {
return fromQuery(source, new CriteriaQuery());
}
if (source.getCriteria() != null) {
destination.addCriteria(source.getCriteria());
}
public static <T extends CriteriaQuery> T fromQuery(CriteriaQuery source, T destination) {
if (source == null || destination == null) {
return null;
}
if (source.getSort() != null) {
destination.addSort(source.getSort());
}
if (source.getCriteria() != null) {
destination.addCriteria(source.getCriteria());
}
return destination;
}
if (source.getSort() != null) {
destination.addSort(source.getSort());
}
@SuppressWarnings("unchecked")
public final <T extends CriteriaQuery> T addCriteria(Criteria criteria) {
Assert.notNull(criteria, "Cannot add null criteria.");
if (this.criteria == null) {
this.criteria = criteria;
} else {
this.criteria.and(criteria);
}
return (T) this;
}
return destination;
}
public Criteria getCriteria() {
return this.criteria;
}
@SuppressWarnings("unchecked")
public final <T extends CriteriaQuery> T addCriteria(Criteria criteria) {
Assert.notNull(criteria, "Cannot add null criteria.");
if (this.criteria == null) {
this.criteria = criteria;
} else {
this.criteria.and(criteria);
}
return (T) this;
}
public Criteria getCriteria() {
return this.criteria;
}
}
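
For orientation, a sketch of building and running a criteria query; it assumes the `elasticsearchTemplate` and `SampleEntity` names used in the other examples and a `queryForList` operation on the template.

```java
CriteriaQuery criteriaQuery = new CriteriaQuery(
		new Criteria("message").contains("spring").and(new Criteria("rate").greaterThanEqual(5)));

List<SampleEntity> entities = elasticsearchTemplate.queryForList(criteriaQuery, SampleEntity.class);
```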

View File

@ -26,8 +26,8 @@ import org.elasticsearch.index.query.QueryBuilder;
public class DeleteQuery {
private QueryBuilder query;
private String index;
private String type;
private String index;
private String type;
public QueryBuilder getQuery() {
return query;
@ -35,21 +35,21 @@ public class DeleteQuery {
public void setQuery(QueryBuilder query) {
this.query = query;
}
}
public String getIndex() {
return index;
}
public String getIndex() {
return index;
}
public void setIndex(String index) {
this.index = index;
}
public void setIndex(String index) {
this.index = index;
}
public String getType() {
return type;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
public void setType(String type) {
this.type = type;
}
}
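
A brief sketch of how this query object is typically used, assuming a `delete(DeleteQuery, Class)` operation on the template that derives index and type from the entity class:

```java
DeleteQuery deleteQuery = new DeleteQuery();
deleteQuery.setQuery(QueryBuilders.termQuery("message", "spring"));

elasticsearchTemplate.delete(deleteQuery, SampleEntity.class);
```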

View File

@ -17,7 +17,7 @@ package org.springframework.data.elasticsearch.core.query;
/**
* Defines a Field that can be used within a Criteria.
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -25,9 +25,8 @@ public interface Field {
/**
* Get the name of the field used in schema.xml of elasticsearch server
*
*
* @return
*/
String getName();
}

View File

@ -17,7 +17,7 @@ package org.springframework.data.elasticsearch.core.query;
/**
* GetQuery
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/

View File

@ -17,7 +17,7 @@ package org.springframework.data.elasticsearch.core.query;
/**
* IndexQuery
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -29,7 +29,7 @@ public class IndexQuery {
private Long version;
private String indexName;
private String type;
private String source;
private String source;
private String parentId;
public String getId() {
@ -72,13 +72,13 @@ public class IndexQuery {
this.type = type;
}
public String getSource() {
return source;
}
public String getSource() {
return source;
}
public void setSource(String source) {
this.source = source;
}
public void setSource(String source) {
this.source = source;
}
public String getParentId() {
return parentId;
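
A hedged sketch of indexing with this query type using a raw JSON source; the index and type names are placeholders, and the setters mirror the fields shown in the hunk above.

```java
IndexQuery indexQuery = new IndexQuery();
indexQuery.setId("1");
indexQuery.setIndexName("test-index");
indexQuery.setType("test-type");
indexQuery.setSource("{\"message\" : \"some message\"}");

elasticsearchTemplate.index(indexQuery);
```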

View File

@ -15,17 +15,17 @@
*/
package org.springframework.data.elasticsearch.core.query;
import org.springframework.data.domain.Pageable;
import static org.apache.commons.collections.CollectionUtils.*;
import static org.springframework.data.elasticsearch.core.query.AbstractQuery.*;
import java.util.ArrayList;
import java.util.List;
import static org.apache.commons.collections.CollectionUtils.addAll;
import static org.springframework.data.elasticsearch.core.query.AbstractQuery.DEFAULT_PAGE;
import org.springframework.data.domain.Pageable;
/**
* MoreLikeThisQuery
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/

View File

@ -15,18 +15,18 @@
*/
package org.springframework.data.elasticsearch.core.query;
import java.util.ArrayList;
import java.util.List;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.springframework.data.elasticsearch.core.facet.FacetRequest;
import java.util.ArrayList;
import java.util.List;
/**
* NativeSearchQuery
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
* @author Artur Konczak
@ -36,18 +36,18 @@ public class NativeSearchQuery extends AbstractQuery implements SearchQuery {
private QueryBuilder query;
private FilterBuilder filter;
private SortBuilder sort;
private List<FacetRequest> facets;
private HighlightBuilder.Field[] highlightFields;
private List<FacetRequest> facets;
private HighlightBuilder.Field[] highlightFields;
public NativeSearchQuery(QueryBuilder query) {
this.query = query;
}
public NativeSearchQuery(QueryBuilder query) {
this.query = query;
}
public NativeSearchQuery(QueryBuilder query, FilterBuilder filter) {
this.query = query;
this.filter = filter;
}
public NativeSearchQuery(QueryBuilder query, FilterBuilder filter) {
this.query = query;
this.filter = filter;
}
public NativeSearchQuery(QueryBuilder query, FilterBuilder filter, SortBuilder sort) {
this.query = query;
@ -55,12 +55,12 @@ public class NativeSearchQuery extends AbstractQuery implements SearchQuery {
this.sort = sort;
}
public NativeSearchQuery(QueryBuilder query, FilterBuilder filter, SortBuilder sort, HighlightBuilder.Field[] highlightFields) {
this.query = query;
this.filter = filter;
this.sort = sort;
this.highlightFields = highlightFields;
}
public NativeSearchQuery(QueryBuilder query, FilterBuilder filter, SortBuilder sort, HighlightBuilder.Field[] highlightFields) {
this.query = query;
this.filter = filter;
this.sort = sort;
this.highlightFields = highlightFields;
}
public QueryBuilder getQuery() {
return query;
@ -72,26 +72,26 @@ public class NativeSearchQuery extends AbstractQuery implements SearchQuery {
public SortBuilder getElasticsearchSort() {
return sort;
}
@Override
public HighlightBuilder.Field[] getHighlightFields() {
return highlightFields;
}
public void addFacet(FacetRequest facetRequest){
if(facets==null){
facets = new ArrayList<FacetRequest>();
}
facets.add(facetRequest);
}
@Override
public HighlightBuilder.Field[] getHighlightFields() {
return highlightFields;
}
public void setFacets(List<FacetRequest> facets){
this.facets = facets;
}
public void addFacet(FacetRequest facetRequest) {
if (facets == null) {
facets = new ArrayList<FacetRequest>();
}
facets.add(facetRequest);
}
@Override
public List<FacetRequest> getFacets() {
return facets;
}
public void setFacets(List<FacetRequest> facets) {
this.facets = facets;
}
@Override
public List<FacetRequest> getFacets() {
return facets;
}
}

View File

@ -15,6 +15,9 @@
*/
package org.springframework.data.elasticsearch.core.query;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.index.query.QueryBuilder;
@ -23,9 +26,6 @@ import org.elasticsearch.search.sort.SortBuilder;
import org.springframework.data.domain.Pageable;
import org.springframework.data.elasticsearch.core.facet.FacetRequest;
import java.util.ArrayList;
import java.util.List;
/**
* NativeSearchQuery
*
@ -36,88 +36,88 @@ import java.util.List;
public class NativeSearchQueryBuilder {
private QueryBuilder queryBuilder;
private FilterBuilder filterBuilder;
private SortBuilder sortBuilder;
private List<FacetRequest> facetRequests = new ArrayList<FacetRequest>();
private HighlightBuilder.Field[] highlightFields;
private Pageable pageable;
private String[] indices;
private String[] types;
private String[] fields;
private float minScore;
private QueryBuilder queryBuilder;
private FilterBuilder filterBuilder;
private SortBuilder sortBuilder;
private List<FacetRequest> facetRequests = new ArrayList<FacetRequest>();
private HighlightBuilder.Field[] highlightFields;
private Pageable pageable;
private String[] indices;
private String[] types;
private String[] fields;
private float minScore;
public NativeSearchQueryBuilder withQuery(QueryBuilder queryBuilder) {
this.queryBuilder = queryBuilder;
return this;
}
public NativeSearchQueryBuilder withQuery(QueryBuilder queryBuilder) {
this.queryBuilder = queryBuilder;
return this;
}
public NativeSearchQueryBuilder withFilter(FilterBuilder filterBuilder) {
this.filterBuilder = filterBuilder;
return this;
}
public NativeSearchQueryBuilder withFilter(FilterBuilder filterBuilder) {
this.filterBuilder = filterBuilder;
return this;
}
public NativeSearchQueryBuilder withSort(SortBuilder sortBuilder) {
this.sortBuilder = sortBuilder;
return this;
}
public NativeSearchQueryBuilder withSort(SortBuilder sortBuilder) {
this.sortBuilder = sortBuilder;
return this;
}
public NativeSearchQueryBuilder withFacet(FacetRequest facetRequest) {
facetRequests.add(facetRequest);
return this;
}
public NativeSearchQueryBuilder withFacet(FacetRequest facetRequest) {
facetRequests.add(facetRequest);
return this;
}
public NativeSearchQueryBuilder withHighlightFields(HighlightBuilder.Field... highlightFields){
this.highlightFields = highlightFields;
return this;
}
public NativeSearchQueryBuilder withHighlightFields(HighlightBuilder.Field... highlightFields) {
this.highlightFields = highlightFields;
return this;
}
public NativeSearchQueryBuilder withPageable(Pageable pageable) {
this.pageable = pageable;
return this;
}
public NativeSearchQueryBuilder withPageable(Pageable pageable) {
this.pageable = pageable;
return this;
}
public NativeSearchQueryBuilder withIndices(String... indices) {
this.indices = indices;
return this;
}
public NativeSearchQueryBuilder withIndices(String... indices) {
this.indices = indices;
return this;
}
public NativeSearchQueryBuilder withTypes(String... types) {
this.types = types;
return this;
}
public NativeSearchQueryBuilder withTypes(String... types) {
this.types = types;
return this;
}
public NativeSearchQueryBuilder withFields(String... fields) {
this.fields = fields;
return this;
}
public NativeSearchQueryBuilder withFields(String... fields) {
this.fields = fields;
return this;
}
public NativeSearchQueryBuilder withMinScore(float minScore) {
this.minScore = minScore;
return this;
}
public NativeSearchQueryBuilder withMinScore(float minScore) {
this.minScore = minScore;
return this;
}
public NativeSearchQuery build() {
NativeSearchQuery nativeSearchQuery = new NativeSearchQuery(queryBuilder, filterBuilder, sortBuilder, highlightFields);
if (pageable != null) {
nativeSearchQuery.setPageable(pageable);
}
if (indices != null) {
nativeSearchQuery.addIndices(indices);
}
if (types != null) {
nativeSearchQuery.addTypes(types);
}
if (fields != null) {
nativeSearchQuery.addFields(fields);
}
if (CollectionUtils.isNotEmpty(facetRequests)) {
nativeSearchQuery.setFacets(facetRequests);
}
public NativeSearchQuery build() {
NativeSearchQuery nativeSearchQuery = new NativeSearchQuery(queryBuilder, filterBuilder, sortBuilder, highlightFields);
if (pageable != null) {
nativeSearchQuery.setPageable(pageable);
}
if (indices != null) {
nativeSearchQuery.addIndices(indices);
}
if (types != null) {
nativeSearchQuery.addTypes(types);
}
if (fields != null) {
nativeSearchQuery.addFields(fields);
}
if (CollectionUtils.isNotEmpty(facetRequests)) {
nativeSearchQuery.setFacets(facetRequests);
}
if(minScore>0){
nativeSearchQuery.setMinScore(minScore);
}
return nativeSearchQuery;
}
if (minScore > 0) {
nativeSearchQuery.setMinScore(minScore);
}
return nativeSearchQuery;
}
}
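
A usage sketch combining the options wired by the builder above; the field names and values are placeholders, and `queryForPage` is assumed to return the `FacetedPage` type from this commit.

```java
SearchQuery searchQuery = new NativeSearchQueryBuilder()
		.withQuery(QueryBuilders.queryString("spring AND data"))
		.withFilter(FilterBuilders.boolFilter().must(FilterBuilders.termFilter("available", true)))
		.withPageable(new PageRequest(0, 10))
		.withMinScore(0.5f)
		.build();

FacetedPage<SampleEntity> result = elasticsearchTemplate.queryForPage(searchQuery, SampleEntity.class);
```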

View File

@ -15,15 +15,15 @@
*/
package org.springframework.data.elasticsearch.core.query;
import java.util.List;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import java.util.List;
/**
* Query
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -34,7 +34,7 @@ public interface Query {
/**
* restrict result to entries on given page. Corresponds to the 'from' and 'size' parameters in elasticsearch
*
*
* @param pageable
* @return
*/
@ -42,21 +42,21 @@ public interface Query {
/**
* Get filter queries if defined
*
*
* @return
*/
// List<FilterQuery> getFilterQueries();
/**
* Get page settings if defined
*
*
* @return
*/
Pageable getPageable();
/**
* Add {@link org.springframework.data.domain.Sort} to query
*
*
* @param sort
* @return
*/
@ -69,49 +69,50 @@ public interface Query {
/**
* Get Indices to be searched
*
*
* @return
*/
List<String> getIndices();
/**
* Add Indices to be added as part of search request
*
*
* @param indices
*/
void addIndices(String... indices);
/**
* Add types to be searched
*
*
* @param types
*/
void addTypes(String... types);
/**
* Get types to be searched
*
*
* @return
*/
List<String> getTypes();
/**
* Add fields to be added as part of search request
*
*
* @param fields
*/
void addFields(String... fields);
/**
* Get fields to be returned as part of search request
*
*
* @return
*/
List<String> getFields();
/**
* Get minimum score
* @return
*/
float getMinScore();
/**
* Get minimum score
*
* @return
*/
float getMinScore();
}

View File

@ -15,29 +15,30 @@
*/
package org.springframework.data.elasticsearch.core.query;
import java.util.List;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.springframework.data.elasticsearch.core.facet.FacetRequest;
import java.util.List;
/**
* NativeSearchQuery
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
* @author Artur Konczak
*/
public interface SearchQuery extends Query {
QueryBuilder getQuery();
FilterBuilder getFilter();
SortBuilder getElasticsearchSort();
List<FacetRequest> getFacets();
List<FacetRequest> getFacets();
HighlightBuilder.Field[] getHighlightFields();
HighlightBuilder.Field[] getHighlightFields();
}

View File

@ -17,7 +17,7 @@ package org.springframework.data.elasticsearch.core.query;
/**
* The most trivial implementation of a Field
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -38,5 +38,4 @@ public class SimpleField implements Field {
public String toString() {
return this.name;
}
}

View File

@ -20,7 +20,7 @@ import org.springframework.data.domain.Sort;
/**
* StringQuery
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -46,5 +46,4 @@ public class StringQuery extends AbstractQuery {
public String getSource() {
return source;
}
}
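
A minimal sketch of running a raw JSON query through this class, assuming a `queryForList(StringQuery, Class)` operation on the template:

```java
StringQuery stringQuery = new StringQuery(QueryBuilders.matchAllQuery().toString());
List<SampleEntity> entities = elasticsearchTemplate.queryForList(stringQuery, SampleEntity.class);
```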

View File

@ -23,58 +23,58 @@ import org.elasticsearch.action.index.IndexRequest;
*/
public class UpdateQuery {
private String id;
private IndexRequest indexRequest;
private String indexName;
private String type;
private Class clazz;
private boolean doUpsert;
private String id;
private IndexRequest indexRequest;
private String indexName;
private String type;
private Class clazz;
private boolean doUpsert;
public String getId() {
return id;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public void setId(String id) {
this.id = id;
}
public IndexRequest getIndexRequest() {
return indexRequest;
}
public IndexRequest getIndexRequest() {
return indexRequest;
}
public void setIndexRequest(IndexRequest indexRequest) {
this.indexRequest = indexRequest;
}
public void setIndexRequest(IndexRequest indexRequest) {
this.indexRequest = indexRequest;
}
public String getIndexName() {
return indexName;
}
public String getIndexName() {
return indexName;
}
public void setIndexName(String indexName) {
this.indexName = indexName;
}
public void setIndexName(String indexName) {
this.indexName = indexName;
}
public String getType() {
return type;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public void setType(String type) {
this.type = type;
}
public Class getClazz() {
return clazz;
}
public Class getClazz() {
return clazz;
}
public void setClazz(Class clazz) {
this.clazz = clazz;
}
public void setClazz(Class clazz) {
this.clazz = clazz;
}
public boolean DoUpsert() {
return doUpsert;
}
public boolean DoUpsert() {
return doUpsert;
}
public void setDoUpsert(boolean doUpsert) {
this.doUpsert = doUpsert;
}
public void setDoUpsert(boolean doUpsert) {
this.doUpsert = doUpsert;
}
}

View File

@ -23,51 +23,51 @@ import org.elasticsearch.action.index.IndexRequest;
*/
public class UpdateQueryBuilder {
private String id;
private IndexRequest indexRequest;
private String indexName;
private String type;
private Class clazz;
private boolean doUpsert;
private String id;
private IndexRequest indexRequest;
private String indexName;
private String type;
private Class clazz;
private boolean doUpsert;
public UpdateQueryBuilder withId(String id){
this.id = id;
return this;
}
public UpdateQueryBuilder withId(String id) {
this.id = id;
return this;
}
public UpdateQueryBuilder withIndexRequest(IndexRequest indexRequest){
this.indexRequest = indexRequest;
return this;
}
public UpdateQueryBuilder withIndexRequest(IndexRequest indexRequest) {
this.indexRequest = indexRequest;
return this;
}
public UpdateQueryBuilder withIndexName(String indexName){
this.indexName = indexName;
return this;
}
public UpdateQueryBuilder withIndexName(String indexName) {
this.indexName = indexName;
return this;
}
public UpdateQueryBuilder withType(String type){
this.type = type;
return this;
}
public UpdateQueryBuilder withType(String type) {
this.type = type;
return this;
}
public UpdateQueryBuilder withClass(Class clazz){
this.clazz = clazz;
return this;
}
public UpdateQueryBuilder withClass(Class clazz) {
this.clazz = clazz;
return this;
}
public UpdateQueryBuilder withDoUpsert(boolean doUpsert){
this.doUpsert = doUpsert;
return this;
}
public UpdateQueryBuilder withDoUpsert(boolean doUpsert) {
this.doUpsert = doUpsert;
return this;
}
public UpdateQuery build(){
UpdateQuery updateQuery = new UpdateQuery();
updateQuery.setId(id);
updateQuery.setIndexName(indexName);
updateQuery.setType(type);
updateQuery.setClazz(clazz);
updateQuery.setIndexRequest(indexRequest);
updateQuery.setDoUpsert(doUpsert);
return updateQuery;
}
public UpdateQuery build() {
UpdateQuery updateQuery = new UpdateQuery();
updateQuery.setId(id);
updateQuery.setIndexName(indexName);
updateQuery.setType(type);
updateQuery.setClazz(clazz);
updateQuery.setIndexRequest(indexRequest);
updateQuery.setDoUpsert(doUpsert);
return updateQuery;
}
}
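
A hedged sketch of a partial-document update built with the builder above; `documentId` is assumed to reference an already indexed `SampleEntity`, and `update(UpdateQuery)` on the template is assumed to execute it.

```java
IndexRequest indexRequest = new IndexRequest();
indexRequest.source("message", "updated message"); // partial document carrying only the changed field

UpdateQuery updateQuery = new UpdateQueryBuilder()
		.withId(documentId)
		.withClass(SampleEntity.class)
		.withIndexRequest(indexRequest)
		.build();

elasticsearchTemplate.update(updateQuery);
```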

View File

@ -23,7 +23,6 @@ import org.springframework.data.repository.PagingAndSortingRepository;
/**
* @param <T>
* @param <ID>
*
* @author Rizwan Idrees
* @author Mohsin Husen
* @author Oliver Gierke

View File

@ -15,6 +15,8 @@
*/
package org.springframework.data.elasticsearch.repository;
import java.io.Serializable;
import org.elasticsearch.index.query.QueryBuilder;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
@ -22,12 +24,9 @@ import org.springframework.data.elasticsearch.core.FacetedPage;
import org.springframework.data.elasticsearch.core.query.SearchQuery;
import org.springframework.data.repository.NoRepositoryBean;
import java.io.Serializable;
/**
* @param <T>
* @param <ID>
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -40,7 +39,7 @@ public interface ElasticsearchRepository<T, ID extends Serializable> extends Ela
FacetedPage<T> search(QueryBuilder query, Pageable pageable);
FacetedPage<T> search(SearchQuery searchQuery);
FacetedPage<T> search(SearchQuery searchQuery);
Page<T> searchSimilar(T entity,String[] fields, Pageable pageable);
Page<T> searchSimilar(T entity, String[] fields, Pageable pageable);
}
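
A short sketch of the two search variants declared above, assuming the `SampleElasticsearchRepository` from the earlier repository examples and an already indexed `sampleEntity`:

```java
FacetedPage<SampleEntity> byName = repository.search(QueryBuilders.termQuery("name", "spring"), new PageRequest(0, 10));

// "more like this" lookup driven by the given fields of an existing entity
Page<SampleEntity> similar = repository.searchSimilar(sampleEntity, new String[]{"message"}, new PageRequest(0, 10));
```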

View File

@ -15,20 +15,20 @@
*/
package org.springframework.data.elasticsearch.repository.cdi;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.repository.support.ElasticsearchRepositoryFactory;
import org.springframework.data.repository.cdi.CdiRepositoryBean;
import org.springframework.util.Assert;
import javax.enterprise.context.spi.CreationalContext;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.inject.spi.BeanManager;
import java.lang.annotation.Annotation;
import java.util.Set;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.repository.support.ElasticsearchRepositoryFactory;
import org.springframework.data.repository.cdi.CdiRepositoryBean;
import org.springframework.util.Assert;
/**
* Uses CdiRepositoryBean to create ElasticsearchRepository instances.
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -37,7 +37,7 @@ public class ElasticsearchRepositoryBean<T> extends CdiRepositoryBean<T> {
private final Bean<ElasticsearchOperations> elasticsearchOperationsBean;
public ElasticsearchRepositoryBean(Bean<ElasticsearchOperations> operations, Set<Annotation> qualifiers,
Class<T> repositoryType, BeanManager beanManager) {
Class<T> repositoryType, BeanManager beanManager) {
super(qualifiers, repositoryType, beanManager);
Assert.notNull(operations, "Cannot create repository with 'null' for ElasticsearchOperations.");
@ -55,5 +55,4 @@ public class ElasticsearchRepositoryBean<T> extends CdiRepositoryBean<T> {
public Class<? extends Annotation> getScope() {
return elasticsearchOperationsBean.getScope();
}
}

View File

@ -15,10 +15,6 @@
*/
package org.springframework.data.elasticsearch.repository.cdi;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.repository.cdi.CdiRepositoryBean;
import org.springframework.data.repository.cdi.CdiRepositoryExtensionSupport;
import javax.enterprise.event.Observes;
import javax.enterprise.inject.UnsatisfiedResolutionException;
import javax.enterprise.inject.spi.AfterBeanDiscovery;
@ -32,9 +28,13 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.repository.cdi.CdiRepositoryBean;
import org.springframework.data.repository.cdi.CdiRepositoryExtensionSupport;
/**
* ElasticsearchRepositoryExtension
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
* @author Oliver Gierke
@ -66,7 +66,7 @@ public class ElasticsearchRepositoryExtension extends CdiRepositoryExtensionSupp
}
private <T> CdiRepositoryBean<T> createRepositoryBean(Class<T> repositoryType, Set<Annotation> qualifiers, BeanManager beanManager) {
Bean<ElasticsearchOperations> elasticsearchOperationsBean = this.elasticsearchOperationsMap.get(qualifiers
.toString());
@ -74,7 +74,7 @@ public class ElasticsearchRepositoryExtension extends CdiRepositoryExtensionSupp
throw new UnsatisfiedResolutionException(String.format("Unable to resolve a bean for '%s' with qualifiers %s.",
ElasticsearchOperations.class.getName(), qualifiers));
}
return new ElasticsearchRepositoryBean<T>(elasticsearchOperationsBean, qualifiers, repositoryType, beanManager);
}
}

View File

@ -15,15 +15,15 @@
*/
package org.springframework.data.elasticsearch.repository.config;
import java.lang.annotation.Annotation;
import org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport;
import org.springframework.data.repository.config.RepositoryConfigurationExtension;
import java.lang.annotation.Annotation;
/**
* {@link org.springframework.context.annotation.ImportBeanDefinitionRegistrar} implementation to trigger configuration
* of the {@link EnableElasticsearchRepositories} annotation.
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/

View File

@ -27,7 +27,7 @@ import org.w3c.dom.Element;
* {@link org.springframework.data.repository.config.RepositoryConfigurationExtension} implementation to configure
* Elasticsearch repository configuration support, evaluating the {@link EnableElasticsearchRepositories} annotation or
* the equivalent XML element.
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/

View File

@ -15,18 +15,18 @@
*/
package org.springframework.data.elasticsearch.repository.config;
import java.lang.annotation.*;
import org.springframework.context.annotation.ComponentScan.Filter;
import org.springframework.context.annotation.Import;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
import org.springframework.data.elasticsearch.repository.support.ElasticsearchRepositoryFactoryBean;
import org.springframework.data.repository.query.QueryLookupStrategy.Key;
import java.lang.annotation.*;
/**
* Annotation to enable Elasticsearch repositories. Will scan the package of the annotated configuration class for
* Spring Data repositories by default.
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -72,7 +72,7 @@ public @interface EnableElasticsearchRepositories {
* Returns the postfix to be used when looking up custom repository implementations. Defaults to {@literal Impl}. So
* for a repository named {@code PersonRepository} the corresponding implementation class will be looked up scanning
* for {@code PersonRepositoryImpl}.
*
*
* @return
*/
String repositoryImplementationPostfix() default "Impl";
@ -80,7 +80,7 @@ public @interface EnableElasticsearchRepositories {
/**
* Configures the location of where to find the Spring Data named queries properties file. Will default to
* {@code META-INFO/elasticsearch-named-queries.properties}.
*
*
* @return
*/
String namedQueriesLocation() default "";
@ -89,7 +89,7 @@ public @interface EnableElasticsearchRepositories {
* Returns the key of the {@link org.springframework.data.repository.query.QueryLookupStrategy} to be used for lookup
* queries for query methods. Defaults to
* {@link org.springframework.data.repository.query.QueryLookupStrategy.Key#CREATE_IF_NOT_FOUND}.
*
*
* @return
*/
Key queryLookupStrategy() default Key.CREATE_IF_NOT_FOUND;
@ -97,7 +97,7 @@ public @interface EnableElasticsearchRepositories {
/**
* Returns the {@link org.springframework.beans.factory.FactoryBean} class to be used for each repository instance.
* Defaults to {@link ElasticsearchRepositoryFactoryBean}.
*
*
* @return
*/
Class<?> repositoryFactoryBeanClass() default ElasticsearchRepositoryFactoryBean.class;
@ -107,7 +107,7 @@ public @interface EnableElasticsearchRepositories {
/**
* Configures the name of the {@link ElasticsearchTemplate} bean definition to be used to create repositories
* discovered through this annotation. Defaults to {@code elasticsearchTemplate}.
*
*
* @return
*/
String elasticsearchTemplateRef() default "elasticsearchTemplate";
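
A minimal Java-config sketch wiring this annotation; the base package is a placeholder and the embedded local node is only one possible way to obtain a client.

```java
@Configuration
@EnableElasticsearchRepositories(basePackages = "com.example.repositories")
public class ElasticsearchConfiguration {

	@Bean
	public ElasticsearchOperations elasticsearchTemplate() {
		return new ElasticsearchTemplate(NodeBuilder.nodeBuilder().local(true).node().client());
	}
}
```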

View File

@ -21,7 +21,7 @@ import org.springframework.data.repository.query.RepositoryQuery;
/**
* AbstractElasticsearchRepositoryQuery
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -32,7 +32,7 @@ public abstract class AbstractElasticsearchRepositoryQuery implements Repository
protected ElasticsearchOperations elasticsearchOperations;
public AbstractElasticsearchRepositoryQuery(ElasticsearchQueryMethod queryMethod,
ElasticsearchOperations elasticsearchOperations) {
ElasticsearchOperations elasticsearchOperations) {
this.queryMethod = queryMethod;
this.elasticsearchOperations = elasticsearchOperations;
}
@ -41,5 +41,4 @@ public abstract class AbstractElasticsearchRepositoryQuery implements Repository
public QueryMethod getQueryMethod() {
return queryMethod;
}
}

View File

@ -25,7 +25,7 @@ import org.springframework.data.repository.query.parser.PartTree;
/**
* ElasticsearchPartQuery
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/

View File

@ -15,6 +15,8 @@
*/
package org.springframework.data.elasticsearch.repository.query;
import java.lang.reflect.Method;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.data.elasticsearch.annotations.Query;
import org.springframework.data.elasticsearch.repository.support.ElasticsearchEntityInformation;
@ -23,11 +25,9 @@ import org.springframework.data.repository.core.RepositoryMetadata;
import org.springframework.data.repository.query.QueryMethod;
import org.springframework.util.StringUtils;
import java.lang.reflect.Method;
/**
* ElasticsearchQueryMethod
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -37,7 +37,7 @@ public class ElasticsearchQueryMethod extends QueryMethod {
private Method method;
public ElasticsearchQueryMethod(Method method, RepositoryMetadata metadata,
ElasticsearchEntityInformationCreator elasticsearchEntityInformationCreator) {
ElasticsearchEntityInformationCreator elasticsearchEntityInformationCreator) {
super(method, metadata);
this.entityInformation = elasticsearchEntityInformationCreator.getEntityInformation(metadata
.getReturnedDomainClass(method));
@ -56,5 +56,4 @@ public class ElasticsearchQueryMethod extends QueryMethod {
private Query getQueryAnnotation() {
return this.method.getAnnotation(Query.class);
}
}

View File

@ -15,6 +15,9 @@
*/
package org.springframework.data.elasticsearch.repository.query;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.core.convert.DateTimeConverters;
@ -22,12 +25,9 @@ import org.springframework.data.elasticsearch.core.query.StringQuery;
import org.springframework.data.repository.query.ParametersParameterAccessor;
import org.springframework.util.Assert;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* ElasticsearchStringQuery
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -48,11 +48,10 @@ public class ElasticsearchStringQuery extends AbstractElasticsearchRepositoryQue
if (!conversionService.canConvert(org.joda.time.LocalDateTime.class, String.class)) {
conversionService.addConverter(DateTimeConverters.JodaLocalDateTimeConverter.INSTANCE);
}
}
public ElasticsearchStringQuery(ElasticsearchQueryMethod queryMethod,
ElasticsearchOperations elasticsearchOperations, String query) {
ElasticsearchOperations elasticsearchOperations, String query) {
super(queryMethod, elasticsearchOperations);
Assert.notNull(query, "Query cannot be empty");
this.query = query;

View File

@ -15,6 +15,9 @@
*/
package org.springframework.data.elasticsearch.repository.query.parser;
import java.util.Collection;
import java.util.Iterator;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.data.domain.Sort;
import org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersistentProperty;
@ -27,12 +30,9 @@ import org.springframework.data.repository.query.parser.AbstractQueryCreator;
import org.springframework.data.repository.query.parser.Part;
import org.springframework.data.repository.query.parser.PartTree;
import java.util.Collection;
import java.util.Iterator;
/**
* ElasticsearchQueryCreator
*
*
* @author Rizwan Idrees
* @author Mohsin Husen
*/
@ -41,7 +41,7 @@ public class ElasticsearchQueryCreator extends AbstractQueryCreator<CriteriaQuer
private final MappingContext<?, ElasticsearchPersistentProperty> context;
public ElasticsearchQueryCreator(PartTree tree, ParameterAccessor parameters,
MappingContext<?, ElasticsearchPersistentProperty> context) {
MappingContext<?, ElasticsearchPersistentProperty> context) {
super(tree, parameters);
this.context = context;
}
@ -89,39 +89,39 @@ public class ElasticsearchQueryCreator extends AbstractQueryCreator<CriteriaQuer
criteria = new Criteria();
}
switch (type) {
case TRUE:
return criteria.is(true);
case FALSE:
return criteria.is(false);
case SIMPLE_PROPERTY:
return criteria.is(parameters.next());
case NEGATING_SIMPLE_PROPERTY:
return criteria.is(parameters.next()).not();
case REGEX:
return criteria.expression(parameters.next().toString());
case LIKE:
case STARTING_WITH:
return criteria.startsWith(parameters.next().toString());
case ENDING_WITH:
return criteria.endsWith(parameters.next().toString());
case CONTAINING:
return criteria.contains(parameters.next().toString());
case AFTER:
case GREATER_THAN:
case GREATER_THAN_EQUAL:
return criteria.greaterThanEqual(parameters.next());
case BEFORE:
case LESS_THAN:
case LESS_THAN_EQUAL:
return criteria.lessThanEqual(parameters.next());
case BETWEEN:
return criteria.between(parameters.next(), parameters.next());
case IN:
return criteria.in(asArray(parameters.next()));
case NOT_IN:
return criteria.in(asArray(parameters.next())).not();
default:
throw new InvalidDataAccessApiUsageException("Illegal criteria found '" + type + "'.");
case TRUE:
return criteria.is(true);
case FALSE:
return criteria.is(false);
case SIMPLE_PROPERTY:
return criteria.is(parameters.next());
case NEGATING_SIMPLE_PROPERTY:
return criteria.is(parameters.next()).not();
case REGEX:
return criteria.expression(parameters.next().toString());
case LIKE:
case STARTING_WITH:
return criteria.startsWith(parameters.next().toString());
case ENDING_WITH:
return criteria.endsWith(parameters.next().toString());
case CONTAINING:
return criteria.contains(parameters.next().toString());
case AFTER:
case GREATER_THAN:
case GREATER_THAN_EQUAL:
return criteria.greaterThanEqual(parameters.next());
case BEFORE:
case LESS_THAN:
case LESS_THAN_EQUAL:
return criteria.lessThanEqual(parameters.next());
case BETWEEN:
return criteria.between(parameters.next(), parameters.next());
case IN:
return criteria.in(asArray(parameters.next()));
case NOT_IN:
return criteria.in(asArray(parameters.next())).not();
default:
throw new InvalidDataAccessApiUsageException("Illegal criteria found '" + type + "'.");
}
}
@ -131,7 +131,6 @@ public class ElasticsearchQueryCreator extends AbstractQueryCreator<CriteriaQuer
} else if (o.getClass().isArray()) {
return (Object[]) o;
}
return new Object[] { o };
return new Object[]{o};
}
}
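
To show how the part types handled in the switch above surface in a repository, a hypothetical interface with derived query methods might look like this; `Book` and its properties are placeholders.

```java
public interface BookRepository extends ElasticsearchRepository<Book, String> {

	// CONTAINING part -> criteria.contains(...)
	List<Book> findByNameContaining(String name);

	// BETWEEN part -> criteria.between(from, to)
	List<Book> findByPriceBetween(Double from, Double to);

	// NOT_IN part -> criteria.in(...).not()
	List<Book> findByCategoryNotIn(Collection<String> categories);
}
```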

Some files were not shown because too many files have changed in this diff.