Merge remote-tracking branch 'remotes/origin/master' into im_20200316_lastn_operation_elasticsearch

# Conflicts:
#	hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java
ianmarshall 2020-05-05 17:19:32 -04:00
commit 4cced40b54
195 changed files with 6422 additions and 2043 deletions

View File

@ -5,12 +5,7 @@ end_of_line = lf
insert_final_newline = true
tab_width = 3
indent_size = 3
[*.java]
charset = utf-8
indent_style = tab
tab_width = 3
indent_size = 3
[*.xml]
charset = utf-8
@ -30,3 +25,259 @@ indent_style = tab
tab_width = 3
indent_size = 3
[*.java]
charset = utf-8
indent_style = tab
tab_width = 3
indent_size = 3
ij_java_align_consecutive_assignments = false
ij_java_align_consecutive_variable_declarations = false
ij_java_align_group_field_declarations = false
ij_java_align_multiline_annotation_parameters = false
ij_java_align_multiline_array_initializer_expression = false
ij_java_align_multiline_assignment = false
ij_java_align_multiline_binary_operation = false
ij_java_align_multiline_chained_methods = false
ij_java_align_multiline_extends_list = false
ij_java_align_multiline_for = true
ij_java_align_multiline_method_parentheses = false
ij_java_align_multiline_parameters = true
ij_java_align_multiline_parameters_in_calls = false
ij_java_align_multiline_parenthesized_expression = false
ij_java_align_multiline_records = true
ij_java_align_multiline_resources = true
ij_java_align_multiline_ternary_operation = false
ij_java_align_multiline_text_blocks = false
ij_java_align_multiline_throws_list = false
ij_java_align_subsequent_simple_methods = false
ij_java_align_throws_keyword = false
ij_java_annotation_parameter_wrap = off
ij_java_array_initializer_new_line_after_left_brace = false
ij_java_array_initializer_right_brace_on_new_line = false
ij_java_array_initializer_wrap = off
ij_java_assert_statement_colon_on_next_line = false
ij_java_assert_statement_wrap = off
ij_java_assignment_wrap = off
ij_java_binary_operation_sign_on_next_line = false
ij_java_binary_operation_wrap = off
ij_java_blank_lines_after_anonymous_class_header = 0
ij_java_blank_lines_after_class_header = 0
ij_java_blank_lines_after_imports = 1
ij_java_blank_lines_after_package = 1
ij_java_blank_lines_around_class = 1
ij_java_blank_lines_around_field = 0
ij_java_blank_lines_around_field_in_interface = 0
ij_java_blank_lines_around_initializer = 1
ij_java_blank_lines_around_method = 1
ij_java_blank_lines_around_method_in_interface = 1
ij_java_blank_lines_before_class_end = 0
ij_java_blank_lines_before_imports = 1
ij_java_blank_lines_before_method_body = 0
ij_java_blank_lines_before_package = 0
ij_java_block_brace_style = end_of_line
ij_java_block_comment_at_first_column = true
ij_java_call_parameters_new_line_after_left_paren = false
ij_java_call_parameters_right_paren_on_new_line = false
ij_java_call_parameters_wrap = off
ij_java_case_statement_on_separate_line = true
ij_java_catch_on_new_line = false
ij_java_class_annotation_wrap = split_into_lines
ij_java_class_brace_style = end_of_line
ij_java_class_count_to_use_import_on_demand = 999
ij_java_class_names_in_javadoc = 1
ij_java_do_not_indent_top_level_class_members = false
ij_java_do_not_wrap_after_single_annotation = false
ij_java_do_while_brace_force = never
ij_java_doc_add_blank_line_after_description = true
ij_java_doc_add_blank_line_after_param_comments = false
ij_java_doc_add_blank_line_after_return = false
ij_java_doc_add_p_tag_on_empty_lines = true
ij_java_doc_align_exception_comments = true
ij_java_doc_align_param_comments = true
ij_java_doc_do_not_wrap_if_one_line = false
ij_java_doc_enable_formatting = true
ij_java_doc_enable_leading_asterisks = true
ij_java_doc_indent_on_continuation = false
ij_java_doc_keep_empty_lines = true
ij_java_doc_keep_empty_parameter_tag = true
ij_java_doc_keep_empty_return_tag = true
ij_java_doc_keep_empty_throws_tag = true
ij_java_doc_keep_invalid_tags = true
ij_java_doc_param_description_on_new_line = false
ij_java_doc_preserve_line_breaks = false
ij_java_doc_use_throws_not_exception_tag = true
ij_java_else_on_new_line = false
ij_java_entity_dd_suffix = EJB
ij_java_entity_eb_suffix = Bean
ij_java_entity_hi_suffix = Home
ij_java_entity_lhi_prefix = Local
ij_java_entity_lhi_suffix = Home
ij_java_entity_li_prefix = Local
ij_java_entity_pk_class = java.lang.String
ij_java_entity_vo_suffix = VO
ij_java_enum_constants_wrap = off
ij_java_extends_keyword_wrap = off
ij_java_extends_list_wrap = off
ij_java_field_annotation_wrap = split_into_lines
ij_java_field_name_prefix = my
ij_java_finally_on_new_line = false
ij_java_for_brace_force = never
ij_java_for_statement_new_line_after_left_paren = false
ij_java_for_statement_right_paren_on_new_line = false
ij_java_for_statement_wrap = off
ij_java_generate_final_locals = false
ij_java_generate_final_parameters = false
ij_java_if_brace_force = never
ij_java_imports_layout = *,|,javax.**,java.**,|,$*
ij_java_indent_case_from_switch = true
ij_java_insert_inner_class_imports = false
ij_java_insert_override_annotation = true
ij_java_keep_blank_lines_before_right_brace = 2
ij_java_keep_blank_lines_between_package_declaration_and_header = 2
ij_java_keep_blank_lines_in_code = 2
ij_java_keep_blank_lines_in_declarations = 2
ij_java_keep_control_statement_in_one_line = true
ij_java_keep_first_column_comment = true
ij_java_keep_indents_on_empty_lines = false
ij_java_keep_line_breaks = true
ij_java_keep_multiple_expressions_in_one_line = false
ij_java_keep_simple_blocks_in_one_line = false
ij_java_keep_simple_classes_in_one_line = false
ij_java_keep_simple_lambdas_in_one_line = false
ij_java_keep_simple_methods_in_one_line = false
ij_java_label_indent_absolute = false
ij_java_label_indent_size = 0
ij_java_lambda_brace_style = end_of_line
ij_java_layout_static_imports_separately = true
ij_java_line_comment_add_space = false
ij_java_line_comment_at_first_column = true
ij_java_message_dd_suffix = EJB
ij_java_message_eb_suffix = Bean
ij_java_method_annotation_wrap = split_into_lines
ij_java_method_brace_style = end_of_line
ij_java_method_call_chain_wrap = off
ij_java_method_parameters_new_line_after_left_paren = false
ij_java_method_parameters_right_paren_on_new_line = false
ij_java_method_parameters_wrap = off
ij_java_modifier_list_wrap = false
ij_java_names_count_to_use_import_on_demand = 999
ij_java_new_line_after_lparen_in_record_header = false
ij_java_packages_to_use_import_on_demand = java.awt.*,javax.swing.*
ij_java_parameter_annotation_wrap = off
ij_java_parameter_name_prefix = the
ij_java_parentheses_expression_new_line_after_left_paren = false
ij_java_parentheses_expression_right_paren_on_new_line = false
ij_java_place_assignment_sign_on_next_line = false
ij_java_prefer_longer_names = true
ij_java_prefer_parameters_wrap = false
ij_java_record_components_wrap = normal
ij_java_repeat_synchronized = true
ij_java_replace_instanceof_and_cast = false
ij_java_replace_null_check = true
ij_java_replace_sum_lambda_with_method_ref = true
ij_java_resource_list_new_line_after_left_paren = false
ij_java_resource_list_right_paren_on_new_line = false
ij_java_resource_list_wrap = off
ij_java_rparen_on_new_line_in_record_header = false
ij_java_session_dd_suffix = EJB
ij_java_session_eb_suffix = Bean
ij_java_session_hi_suffix = Home
ij_java_session_lhi_prefix = Local
ij_java_session_lhi_suffix = Home
ij_java_session_li_prefix = Local
ij_java_session_si_suffix = Service
ij_java_space_after_closing_angle_bracket_in_type_argument = false
ij_java_space_after_colon = true
ij_java_space_after_comma = true
ij_java_space_after_comma_in_type_arguments = true
ij_java_space_after_for_semicolon = true
ij_java_space_after_quest = true
ij_java_space_after_type_cast = true
ij_java_space_before_annotation_array_initializer_left_brace = false
ij_java_space_before_annotation_parameter_list = false
ij_java_space_before_array_initializer_left_brace = false
ij_java_space_before_catch_keyword = true
ij_java_space_before_catch_left_brace = true
ij_java_space_before_catch_parentheses = true
ij_java_space_before_class_left_brace = true
ij_java_space_before_colon = true
ij_java_space_before_colon_in_foreach = true
ij_java_space_before_comma = false
ij_java_space_before_do_left_brace = true
ij_java_space_before_else_keyword = true
ij_java_space_before_else_left_brace = true
ij_java_space_before_finally_keyword = true
ij_java_space_before_finally_left_brace = true
ij_java_space_before_for_left_brace = true
ij_java_space_before_for_parentheses = true
ij_java_space_before_for_semicolon = false
ij_java_space_before_if_left_brace = true
ij_java_space_before_if_parentheses = true
ij_java_space_before_method_call_parentheses = false
ij_java_space_before_method_left_brace = true
ij_java_space_before_method_parentheses = false
ij_java_space_before_opening_angle_bracket_in_type_parameter = false
ij_java_space_before_quest = true
ij_java_space_before_switch_left_brace = true
ij_java_space_before_switch_parentheses = true
ij_java_space_before_synchronized_left_brace = true
ij_java_space_before_synchronized_parentheses = true
ij_java_space_before_try_left_brace = true
ij_java_space_before_try_parentheses = true
ij_java_space_before_type_parameter_list = false
ij_java_space_before_while_keyword = true
ij_java_space_before_while_left_brace = true
ij_java_space_before_while_parentheses = true
ij_java_space_inside_one_line_enum_braces = false
ij_java_space_within_empty_array_initializer_braces = false
ij_java_space_within_empty_method_call_parentheses = false
ij_java_space_within_empty_method_parentheses = false
ij_java_spaces_around_additive_operators = true
ij_java_spaces_around_assignment_operators = true
ij_java_spaces_around_bitwise_operators = true
ij_java_spaces_around_equality_operators = true
ij_java_spaces_around_lambda_arrow = true
ij_java_spaces_around_logical_operators = true
ij_java_spaces_around_method_ref_dbl_colon = false
ij_java_spaces_around_multiplicative_operators = true
ij_java_spaces_around_relational_operators = true
ij_java_spaces_around_shift_operators = true
ij_java_spaces_around_type_bounds_in_type_parameters = true
ij_java_spaces_around_unary_operator = false
ij_java_spaces_within_angle_brackets = false
ij_java_spaces_within_annotation_parentheses = false
ij_java_spaces_within_array_initializer_braces = false
ij_java_spaces_within_braces = false
ij_java_spaces_within_brackets = false
ij_java_spaces_within_cast_parentheses = false
ij_java_spaces_within_catch_parentheses = false
ij_java_spaces_within_for_parentheses = false
ij_java_spaces_within_if_parentheses = false
ij_java_spaces_within_method_call_parentheses = false
ij_java_spaces_within_method_parentheses = false
ij_java_spaces_within_parentheses = false
ij_java_spaces_within_switch_parentheses = false
ij_java_spaces_within_synchronized_parentheses = false
ij_java_spaces_within_try_parentheses = false
ij_java_spaces_within_while_parentheses = false
ij_java_special_else_if_treatment = true
ij_java_static_field_name_prefix = our
ij_java_subclass_name_suffix = Impl
ij_java_ternary_operation_signs_on_next_line = false
ij_java_ternary_operation_wrap = off
ij_java_test_name_suffix = Test
ij_java_throws_keyword_wrap = off
ij_java_throws_list_wrap = off
ij_java_use_external_annotations = false
ij_java_use_fq_class_names = false
ij_java_use_relative_indents = false
ij_java_use_single_class_imports = true
ij_java_variable_annotation_wrap = off
ij_java_visibility = public
ij_java_while_brace_force = never
ij_java_while_on_new_line = false
ij_java_wrap_comments = false
ij_java_wrap_first_method_in_call_chain = false
ij_java_wrap_long_lines = false

View File

@ -5,7 +5,6 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hl7.fhir.instance.model.api.IBaseDatatype;
import org.hl7.fhir.instance.model.api.IBaseExtension;
import org.hl7.fhir.instance.model.api.IIdType;
@ -105,19 +104,30 @@ public class RuntimeSearchParam {
}
}
/**
* Constructor
*/
public RuntimeSearchParam(String theName, String theDescription, String thePath, RestSearchParameterTypeEnum theParamType, Set<String> theProvidesMembershipInCompartments, Set<String> theTargets, RuntimeSearchParamStatusEnum theStatus) {
this(null, null, theName, theDescription, thePath, theParamType, null, theProvidesMembershipInCompartments, theTargets, theStatus);
}
/**
* Retrieve user data - This can be used to store any application-specific data
*
* @return
* Copy constructor
*/
public RuntimeSearchParam(RuntimeSearchParam theSp) {
this(theSp.getId(), theSp.getUri(), theSp.getName(), theSp.getDescription(), theSp.getPath(), theSp.getParamType(), theSp.getCompositeOf(), theSp.getProvidesMembershipInCompartments(), theSp.getTargets(), theSp.getStatus(), theSp.getBase());
}
/**
* Retrieve user data - This can be used to store any application-specific data
*/
@Nonnull
public List<IBaseExtension<?, ?>> getExtensions(String theKey) {
List<IBaseExtension<?, ?>> retVal = myExtensions.get(theKey);
if (retVal != null) {
retVal = Collections.unmodifiableList(retVal);
} else {
retVal = Collections.emptyList();
}
return retVal;
}

View File

@ -231,9 +231,9 @@ public enum Pointcut {
/**
* <b>Server Hook:</b>
* This hook is invoked before an incoming request is processed. Note that this method is called
* after the server has begin preparing the response to the incoming client request.
* after the server has begun preparing the response to the incoming client request.
* As such, it is not able to supply a response to the incoming request in the way that
* SERVER_INCOMING_REQUEST_PRE_HANDLED and
* SERVER_INCOMING_REQUEST_PRE_PROCESSED and
* {@link #SERVER_INCOMING_REQUEST_POST_PROCESSED}
* are.
* <p>
@ -425,7 +425,7 @@ public enum Pointcut {
"java.io.Writer",
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
),
),
/**
@ -1401,7 +1401,7 @@ public enum Pointcut {
* </li>
* </ul>
* <p>
* Hooks should return an instance of <code>ca.uhn.fhir.jpa.api.model.RequestPartitionId</code> or <code>null</code>.
* Hooks must return an instance of <code>ca.uhn.fhir.interceptor.model.RequestPartitionId</code>.
* </p>
*/
STORAGE_PARTITION_IDENTIFY_CREATE(
@ -1440,7 +1440,7 @@ public enum Pointcut {
* </li>
* </ul>
* <p>
* Hooks should return an instance of <code>ca.uhn.fhir.jpa.api.model.RequestPartitionId</code> or <code>null</code>.
* Hooks must return an instance of <code>ca.uhn.fhir.interceptor.model.RequestPartitionId</code>.
* </p>
*/
STORAGE_PARTITION_IDENTIFY_READ(
@ -1451,6 +1451,49 @@ public enum Pointcut {
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
),
/**
* <b>Storage Hook:</b>
* Invoked before any partition-aware FHIR operation, once the selected partition has been identified (i.e. after the
* {@link #STORAGE_PARTITION_IDENTIFY_CREATE} or {@link #STORAGE_PARTITION_IDENTIFY_READ} hook has been called). This allows
* a separate hook to register and potentially decide whether the request should be allowed to proceed.
* <p>
* This hook will only be called if
* partitioning is enabled in the JPA server.
* </p>
* <p>
* Hooks may accept the following parameters:
* </p>
* <ul>
* <li>
* ca.uhn.fhir.interceptor.model.RequestPartitionId - The partition ID that was selected
* </li>
* <li>
* ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
* pulled out of the servlet request. Note that the bean
* properties are not all guaranteed to be populated, depending on how early during processing the
hook is invoked.
* </li>
* <li>
* ca.uhn.fhir.rest.server.servlet.ServletRequestDetails - A bean containing details about the request that is about to be processed, including details such as the
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
* pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will
* only be populated when operating in a RestfulServer implementation. It is provided as a convenience.
* </li>
* </ul>
* <p>
* Hooks must return void.
* </p>
*/
STORAGE_PARTITION_SELECTED(
// Return type
void.class,
// Params
"ca.uhn.fhir.interceptor.model.RequestPartitionId",
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
),
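To make the new pointcut concrete, here is a minimal interceptor sketch (not part of this change set; the class name and log output are invented) that registers against STORAGE_PARTITION_SELECTED with the parameter types listed in the javadoc above:

// Illustrative only: a hook observing the newly added pointcut. Per the javadoc above,
// the method accepts the selected RequestPartitionId plus the request details and returns void.
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.rest.api.server.RequestDetails;

@Interceptor
public class PartitionSelectionAuditInterceptor {

   @Hook(Pointcut.STORAGE_PARTITION_SELECTED)
   public void partitionSelected(RequestPartitionId thePartitionId, RequestDetails theRequestDetails) {
      // A hook could audit the selection here, or throw an exception to block the request
      System.out.println("Partition selected for incoming request: " + thePartitionId);
   }
}
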
/**
* <b>Performance Tracing Hook:</b>
* This hook is invoked when any informational messages generated by the

View File

@ -20,27 +20,55 @@ package ca.uhn.fhir.interceptor.model;
* #L%
*/
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.time.LocalDate;
import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
/**
* @since 5.0.0
*/
public class RequestPartitionId {
private final Integer myPartitionId;
private static final RequestPartitionId ALL_PARTITIONS = new RequestPartitionId();
private final LocalDate myPartitionDate;
private final boolean myAllPartitions;
private final Integer myPartitionId;
private final String myPartitionName;
/**
* Constructor
* Constructor for a single partition
*/
private RequestPartitionId(@Nullable String thePartitionName, @Nullable Integer thePartitionId, @Nullable LocalDate thePartitionDate) {
myPartitionName = thePartitionName;
myPartitionId = thePartitionId;
myPartitionName = thePartitionName;
myPartitionDate = thePartitionDate;
myAllPartitions = false;
}
/**
* Constructor for all partitions
*/
private RequestPartitionId() {
super();
myPartitionDate = null;
myPartitionName = null;
myPartitionId = null;
myAllPartitions = true;
}
public boolean isAllPartitions() {
return myAllPartitions;
}
@Nullable
public LocalDate getPartitionDate() {
return myPartitionDate;
}
@Nullable
public String getPartitionName() {
return myPartitionName;
}
@ -50,32 +78,59 @@ public class RequestPartitionId {
return myPartitionId;
}
@Nullable
public LocalDate getPartitionDate() {
return myPartitionDate;
}
@Override
public String toString() {
return getPartitionIdStringOrNullString();
return "RequestPartitionId[id=" + getPartitionId() + ", name=" + getPartitionName() + "]";
}
/**
* Returns the partition ID (numeric) as a string, or the string "null"
*/
public String getPartitionIdStringOrNullString() {
return defaultIfNull(myPartitionId, "null").toString();
if (myPartitionId == null) {
return "null";
}
return myPartitionId.toString();
}
/**
* Create a string representation suitable for use as a cache key. Null aware.
*/
public static String stringifyForKey(RequestPartitionId theRequestPartitionId) {
String retVal = "(null)";
if (theRequestPartitionId != null) {
retVal = theRequestPartitionId.getPartitionIdStringOrNullString();
@Override
public boolean equals(Object theO) {
if (this == theO) {
return true;
}
return retVal;
if (theO == null || getClass() != theO.getClass()) {
return false;
}
RequestPartitionId that = (RequestPartitionId) theO;
return new EqualsBuilder()
.append(myAllPartitions, that.myAllPartitions)
.append(myPartitionDate, that.myPartitionDate)
.append(myPartitionId, that.myPartitionId)
.append(myPartitionName, that.myPartitionName)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(myPartitionDate)
.append(myAllPartitions)
.append(myPartitionId)
.append(myPartitionName)
.toHashCode();
}
@Nonnull
public static RequestPartitionId allPartitions() {
return ALL_PARTITIONS;
}
@Nonnull
public static RequestPartitionId defaultPartition() {
return fromPartitionId(null);
}
@Nonnull
@ -99,8 +154,23 @@ public class RequestPartitionId {
}
@Nonnull
public static RequestPartitionId forPartitionNameAndId(@Nullable String thePartitionName, @Nullable Integer thePartitionId, @Nullable LocalDate thePartitionDate) {
public static RequestPartitionId fromPartitionIdAndName(@Nullable Integer thePartitionId, @Nullable String thePartitionName) {
return new RequestPartitionId(thePartitionName, thePartitionId, null);
}
@Nonnull
public static RequestPartitionId forPartitionIdAndName(@Nullable Integer thePartitionId, @Nullable String thePartitionName, @Nullable LocalDate thePartitionDate) {
return new RequestPartitionId(thePartitionName, thePartitionId, thePartitionDate);
}
/**
* Create a string representation suitable for use as a cache key. Null aware.
*/
public static String stringifyForKey(RequestPartitionId theRequestPartitionId) {
String retVal = "(null)";
if (theRequestPartitionId != null) {
retVal = theRequestPartitionId.getPartitionIdStringOrNullString();
}
return retVal;
}
}
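A short usage sketch of the refactored class (illustrative only; the partition name is invented, but every method shown is declared in the diff above):

// Minimal usage sketch for the refactored RequestPartitionId
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import java.time.LocalDate;

public class RequestPartitionIdExample {
   public static void main(String[] args) {
      RequestPartitionId all = RequestPartitionId.allPartitions();
      RequestPartitionId tenantA = RequestPartitionId.forPartitionIdAndName(123, "TENANT-A", LocalDate.of(2020, 5, 5));

      System.out.println(all.isAllPartitions());                        // true
      System.out.println(tenantA.getPartitionIdStringOrNullString());   // "123"
      System.out.println(RequestPartitionId.stringifyForKey(tenantA));  // "123" (cache-key form)
      System.out.println(tenantA);                                      // RequestPartitionId[id=123, name=TENANT-A]
   }
}
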

View File

@ -264,6 +264,13 @@ public class Constants {
public static final String PARAM_FHIRPATH = "_fhirpath";
public static final String PARAM_TYPE = "_type";
/**
* {@link org.hl7.fhir.instance.model.api.IBaseResource#getUserData(String) User metadata key} used
* to store the partition ID (if any) associated with the given resource. Value for this
* key will be of type {@link ca.uhn.fhir.interceptor.model.RequestPartitionId}.
*/
public static final String RESOURCE_PARTITION_ID = Constants.class.getName() + "_RESOURCE_PARTITION_ID";
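As a hedged illustration of how this key would be consulted (theResource is a hypothetical IBaseResource instance):

// Sketch: read the partition ID stored as user metadata on a resource instance
RequestPartitionId partitionId = (RequestPartitionId) theResource.getUserData(Constants.RESOURCE_PARTITION_ID);
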
static {
CHARSET_UTF8 = StandardCharsets.UTF_8;
CHARSET_US_ASCII = StandardCharsets.ISO_8859_1;

View File

@ -0,0 +1,37 @@
package ca.uhn.fhir.rest.client.api;
/*-
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2020 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public abstract class BaseHttpRequest implements IHttpRequest {
private UrlSourceEnum myUrlSource;
@Override
public UrlSourceEnum getUrlSource() {
return myUrlSource;
}
@Override
public void setUrlSource(UrlSourceEnum theUrlSource) {
myUrlSource = theUrlSource;
}
}

View File

@ -60,11 +60,15 @@ public interface IHttpRequest {
/**
* Return the request URI, or null
*
* @see #getUri()
*/
String getUri();
/**
* Modify the request URI, or null
*
* @see #setUrlSource(UrlSourceEnum)
*/
void setUri(String theUrl);
@ -79,4 +83,19 @@ public interface IHttpRequest {
* @param theHeaderName The header name, e.g. "Accept" (must not be null or blank)
*/
void removeHeaders(String theHeaderName);
/**
* Where did the URL come from?
*
* @since 5.0.0
*/
UrlSourceEnum getUrlSource();
/**
* Where did the URL come from?
*
* @since 5.0.0
*/
void setUrlSource(UrlSourceEnum theUrlSource);
}

View File

@ -0,0 +1,35 @@
package ca.uhn.fhir.rest.client.api;
/*-
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2020 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public enum UrlSourceEnum {
/**
* URL was generated by the client (typically by appending the resource path and query parameters to the base URL)
*/
GENERATED,
/**
* URL was supplied explicitly (e.g. it came from a paging link in a bundle)
*/
EXPLICIT
}

View File

@ -28,7 +28,7 @@ import ca.uhn.fhir.rest.api.QualifiedParamList;
import java.util.ArrayList;
import java.util.List;
abstract class BaseOrListParam<MT extends BaseOrListParam<?, ?>, PT extends IQueryParameterType> implements IQueryParameterOr<PT> {
public abstract class BaseOrListParam<MT extends BaseOrListParam<?, ?>, PT extends IQueryParameterType> implements IQueryParameterOr<PT> {
private List<PT> myList = new ArrayList<>();

View File

@ -31,7 +31,7 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
/**
* Base class for RESTful operation parameter types
*/
abstract class BaseParam implements IQueryParameterType {
public abstract class BaseParam implements IQueryParameterType {
private Boolean myMissing;

View File

@ -6,12 +6,19 @@ import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.api.QualifiedParamList;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.apache.commons.lang3.time.DateUtils;
import ca.uhn.fhir.util.DateUtils;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import java.util.*;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.TimeZone;
import static ca.uhn.fhir.rest.param.ParamPrefixEnum.*;
import static ca.uhn.fhir.rest.param.ParamPrefixEnum.EQUAL;
import static ca.uhn.fhir.rest.param.ParamPrefixEnum.GREATERTHAN_OR_EQUALS;
import static ca.uhn.fhir.rest.param.ParamPrefixEnum.LESSTHAN_OR_EQUALS;
import static java.lang.String.format;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
@ -263,6 +270,67 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
return this;
}
/**
* Return the current lower bound as an integer representing the date, in yyyyMMdd form.
*
* e.g. 2019-02-22T04:22:00-0500 -> 20190222
*/
public Integer getLowerBoundAsDateInteger() {
if (myLowerBound == null || myLowerBound.getValue() == null) {
return null;
}
int retVal = DateUtils.convertDatetoDayInteger(myLowerBound.getValue());
if (myLowerBound.getPrefix() != null) {
switch (myLowerBound.getPrefix()) {
case GREATERTHAN:
case STARTS_AFTER:
retVal += 1;
break;
case EQUAL:
case GREATERTHAN_OR_EQUALS:
break;
case LESSTHAN:
case APPROXIMATE:
case LESSTHAN_OR_EQUALS:
case ENDS_BEFORE:
case NOT_EQUAL:
throw new IllegalStateException("Invalid lower bound comparator: " + myLowerBound.getPrefix());
}
}
return retVal;
}
/**
* Return the current upper bound as an integer representing the date, in yyyyMMdd form.
*
* e.g. 2019-02-22T04:22:00-0500 -> 20190222
*/
public Integer getUpperBoundAsDateInteger() {
if (myUpperBound == null || myUpperBound.getValue() == null) {
return null;
}
int retVal = DateUtils.convertDatetoDayInteger(myUpperBound.getValue());
if (myUpperBound.getPrefix() != null) {
switch (myUpperBound.getPrefix()) {
case LESSTHAN:
case ENDS_BEFORE:
retVal -= 1;
break;
case EQUAL:
case LESSTHAN_OR_EQUALS:
break;
case GREATERTHAN_OR_EQUALS:
case GREATERTHAN:
case APPROXIMATE:
case NOT_EQUAL:
case STARTS_AFTER:
throw new IllegalStateException("Invalid upper bound comparator: " + myUpperBound.getPrefix());
}
}
return retVal;
}
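A brief usage sketch of the two new accessors (assuming DateRangeParam's existing two-string constructor; inclusive ge/le prefixes are used so no ±1 adjustment applies):

// Sketch: yyyyMMdd integer bounds from an inclusive date range
DateRangeParam range = new DateRangeParam("ge2020-02-01", "le2020-02-29");
Integer lower = range.getLowerBoundAsDateInteger();   // 20200201
Integer upper = range.getUpperBoundAsDateInteger();   // 20200229
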
public Date getLowerBoundAsInstant() {
if (myLowerBound == null || myLowerBound.getValue() == null) {
return null;
@ -270,10 +338,7 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
Date retVal = myLowerBound.getValue();
if (myLowerBound.getPrecision().ordinal() <= TemporalPrecisionEnum.DAY.ordinal()) {
Calendar cal = DateUtils.toCalendar(retVal);
cal.setTimeZone(TimeZone.getTimeZone("GMT-11:30"));
cal = DateUtils.truncate(cal, Calendar.DATE);
retVal = cal.getTime();
retVal = DateUtils.getLowestInstantFromDate(retVal);
}
if (myLowerBound.getPrefix() != null) {
@ -335,10 +400,7 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
Date retVal = myUpperBound.getValue();
if (myUpperBound.getPrecision().ordinal() <= TemporalPrecisionEnum.DAY.ordinal()) {
Calendar cal = DateUtils.toCalendar(retVal);
cal.setTimeZone(TimeZone.getTimeZone("GMT+11:30"));
cal = DateUtils.truncate(cal, Calendar.DATE);
retVal = cal.getTime();
retVal = DateUtils.getHighestInstantFromDate(retVal);
}
if (myUpperBound.getPrefix() != null) {

View File

@ -0,0 +1,126 @@
package ca.uhn.fhir.util;
/*-
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2020 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.input.BOMInputStream;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import java.io.IOException;
import java.io.InputStream;
import java.util.function.Function;
import java.util.zip.GZIPInputStream;
/**
* Use this API with caution, it may change!
*/
public class ClasspathUtil {
private static final Logger ourLog = LoggerFactory.getLogger(ClasspathUtil.class);
public static String loadResource(String theClasspath) {
Function<InputStream, InputStream> streamTransform = t -> t;
return loadResource(theClasspath, streamTransform);
}
/**
* Load a classpath resource, throw an {@link InternalErrorException} if not found
*/
@Nonnull
public static InputStream loadResourceAsStream(String theClasspath) {
InputStream retVal = ClasspathUtil.class.getResourceAsStream(theClasspath);
if (retVal == null) {
throw new InternalErrorException("Unable to find classpath resource: " + theClasspath);
}
return retVal;
}
/**
* Load a classpath resource, throw an {@link InternalErrorException} if not found
*/
@Nonnull
public static String loadResource(String theClasspath, Function<InputStream, InputStream> theStreamTransform) {
InputStream stream = ClasspathUtil.class.getResourceAsStream(theClasspath);
try {
if (stream == null) {
throw new IOException("Unable to find classpath resource: " + theClasspath);
}
try {
InputStream newStream = theStreamTransform.apply(stream);
return IOUtils.toString(newStream, Charsets.UTF_8);
} finally {
stream.close();
}
} catch (IOException e) {
throw new InternalErrorException(e);
}
}
@Nonnull
public static String loadCompressedResource(String theClasspath) {
Function<InputStream, InputStream> streamTransform = t -> {
try {
return new GZIPInputStream(t);
} catch (IOException e) {
throw new InternalErrorException(e);
}
};
return loadResource(theClasspath, streamTransform);
}
@Nonnull
public static <T extends IBaseResource> T loadResource(FhirContext theCtx, Class<T> theType, String theClasspath) {
String raw = loadResource(theClasspath);
return EncodingEnum.detectEncodingNoDefault(raw).newParser(theCtx).parseResource(theType, raw);
}
public static void close(InputStream theInput) {
try {
if (theInput != null) {
theInput.close();
}
} catch (IOException e) {
ourLog.debug("Closing InputStream threw exception", e);
}
}
public static Function<InputStream, InputStream> withBom() {
return t -> new BOMInputStream(t);
}
public static byte[] loadResourceAsByteArray(String theClasspath) {
InputStream stream = loadResourceAsStream(theClasspath);
try {
return IOUtils.toByteArray(stream);
} catch (IOException e) {
throw new InternalErrorException(e);
} finally {
close(stream);
}
}
}
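A usage sketch for the new utility (the classpath locations are invented, and the typed overload assumes an R4 structures module is on the classpath):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.ClasspathUtil;
import org.hl7.fhir.r4.model.Patient;

public class ClasspathUtilExample {
   public static void main(String[] args) {
      // Plain text resource; throws InternalErrorException if it cannot be found
      String narrative = ClasspathUtil.loadResource("/example/narrative.txt");

      // Gzipped resource, transparently decompressed
      String bigBundle = ClasspathUtil.loadCompressedResource("/example/bundle.json.gz");

      // Parse a FHIR resource directly from the classpath
      Patient patient = ClasspathUtil.loadResource(FhirContext.forR4(), Patient.class, "/example/patient.json");

      System.out.println(narrative.length() + " / " + bigBundle.length() + " / " + patient.getId());
   }
}
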

View File

@ -23,7 +23,12 @@ package ca.uhn.fhir.util;
import java.lang.ref.SoftReference;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;
/**
* A utility class for parsing and formatting HTTP dates as used in cookies and
@ -65,6 +70,8 @@ public final class DateUtils {
@SuppressWarnings("WeakerAccess")
public static final String PATTERN_ASCTIME = "EEE MMM d HH:mm:ss yyyy";
private static final String PATTERN_INTEGER_DATE = "yyyyMMdd";
private static final String[] DEFAULT_PATTERNS = new String[]{
PATTERN_RFC1123,
PATTERN_RFC1036,
@ -153,6 +160,35 @@ public final class DateUtils {
return null;
}
public static Date getHighestInstantFromDate(Date theDateValue) {
return getInstantFromDateWithTimezone(theDateValue, TimeZone.getTimeZone("GMT+11:30"));
}
public static Date getLowestInstantFromDate(Date theDateValue) {
return getInstantFromDateWithTimezone(theDateValue, TimeZone.getTimeZone("GMT-11:30"));
}
public static Date getInstantFromDateWithTimezone(Date theDateValue, TimeZone theTimezone) {
Calendar cal = org.apache.commons.lang3.time.DateUtils.toCalendar(theDateValue);
cal.setTimeZone(theTimezone);
cal = org.apache.commons.lang3.time.DateUtils.truncate(cal, Calendar.DATE);
return cal.getTime();
}
public static int convertDatetoDayInteger(final Date theDateValue) {
notNull(theDateValue, "Date value");
SimpleDateFormat format = new SimpleDateFormat(PATTERN_INTEGER_DATE);
String theDateString = format.format(theDateValue);
return Integer.parseInt(theDateString);
}
public static String convertDateToIso8601String(final Date theDateValue){
notNull(theDateValue, "Date value");
SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX");
return format.format(theDateValue);
}
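A small sketch exercising the new helpers (the input date is arbitrary; the ISO output depends on the JVM's default timezone):

import ca.uhn.fhir.util.DateUtils;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

public class DateUtilsExample {
   public static void main(String[] args) throws ParseException {
      Date date = new SimpleDateFormat("yyyy-MM-dd").parse("2020-05-05");
      int dayInteger = DateUtils.convertDatetoDayInteger(date);   // 20200505
      String iso = DateUtils.convertDateToIso8601String(date);    // e.g. "2020-05-05T00:00:00.000-04:00" (zone dependent)
      Date lowest = DateUtils.getLowestInstantFromDate(date);     // midnight of that date as observed in GMT-11:30
      Date highest = DateUtils.getHighestInstantFromDate(date);   // midnight of that date as observed in GMT+11:30
      System.out.println(dayInteger + " / " + iso + " / " + lowest + " / " + highest);
   }
}
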
/**
* Formats the given date according to the RFC 1123 pattern.
*

View File

@ -96,7 +96,7 @@ public class MetaUtil {
value.setValue(theValue);
sourceExtension.setValue(value);
} else {
ourLog.error(MetaUtil.class.getSimpleName() + ".setSource() not supported on FHIR Version " + theContext.getVersion().getVersion());
ourLog.debug(MetaUtil.class.getSimpleName() + ".setSource() not supported on FHIR Version " + theContext.getVersion().getVersion());
}
}

View File

@ -59,6 +59,7 @@ public enum VersionEnum {
V4_0_3,
V4_1_0,
V4_2_0,
@Deprecated
V4_3_0, // 4.3.0 was renamed to 5.0.0 during the cycle
V5_0_0;

View File

@ -23,9 +23,7 @@ package ca.uhn.fhir.validation;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.input.BOMInputStream;
import ca.uhn.fhir.util.ClasspathUtil;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.w3c.dom.ls.LSInput;
import org.w3c.dom.ls.LSResourceResolver;
@ -41,10 +39,7 @@ import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
@ -152,20 +147,9 @@ public class SchemaBaseValidator implements IValidatorModule {
Source loadXml(String theSchemaName) {
String pathToBase = myCtx.getVersion().getPathToSchemaDefinitions() + '/' + theSchemaName;
ourLog.debug("Going to load resource: {}", pathToBase);
try (InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase)) {
if (baseIs == null) {
throw new InternalErrorException("Schema not found. " + RESOURCES_JAR_NOTE);
}
try (BOMInputStream bomInputStream = new BOMInputStream(baseIs, false)) {
try (InputStreamReader baseReader = new InputStreamReader(bomInputStream, StandardCharsets.UTF_8)) {
// Buffer so that we can close the input stream
String contents = IOUtils.toString(baseReader);
return new StreamSource(new StringReader(contents), null);
}
}
} catch (IOException e) {
throw new InternalErrorException(e);
}
String contents = ClasspathUtil.loadResource(pathToBase, ClasspathUtil.withBom());
return new StreamSource(new StringReader(contents), null);
}
@Override
@ -188,16 +172,8 @@ public class SchemaBaseValidator implements IValidatorModule {
ourLog.debug("Loading referenced schema file: " + pathToBase);
try (InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase)) {
if (baseIs == null) {
throw new InternalErrorException("Schema file not found: " + pathToBase);
}
byte[] bytes = IOUtils.toByteArray(baseIs);
input.setByteStream(new ByteArrayInputStream(bytes));
} catch (IOException e) {
throw new InternalErrorException(e);
}
byte[] bytes = ClasspathUtil.loadResourceAsByteArray(pathToBase);
input.setByteStream(new ByteArrayInputStream(bytes));
return input;
}

View File

@ -22,16 +22,26 @@ package org.hl7.fhir.instance.model.api;
*/
public interface IPrimitiveType<T> extends IBaseDatatype {
import javax.annotation.Nullable;
void setValueAsString(String theValue) throws IllegalArgumentException;
public interface IPrimitiveType<T> extends IBaseDatatype {
String getValueAsString();
void setValueAsString(String theValue) throws IllegalArgumentException;
T getValue();
boolean hasValue();
IPrimitiveType<T> setValue(T theValue) throws IllegalArgumentException;
boolean hasValue();
/**
* If the supplied argument is non-null, returns the results of {@link #getValue()}. If the supplied argument is null, returns null.
*/
@Nullable
static <T> T toValueOrNull(@Nullable IPrimitiveType<T> thePrimitiveType) {
return thePrimitiveType != null ? thePrimitiveType.getValue() : null;
}
}
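A tiny sketch of the new static helper (uses an R4 StringType purely for illustration):

import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.StringType;

public class ToValueOrNullExample {
   public static void main(String[] args) {
      IPrimitiveType<String> family = new StringType("Simpson");
      String value = IPrimitiveType.toValueOrNull(family);   // "Simpson"
      String absent = IPrimitiveType.toValueOrNull(null);    // null, no NullPointerException
      System.out.println(value + " / " + absent);
   }
}
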

View File

@ -83,7 +83,6 @@ ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionContainsMultipleWithDuplica
ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionEntryHasInvalidVerb=Transaction bundle entry has missing or invalid HTTP Verb specified in Bundle.entry({1}).request.method. Found value: "{0}"
ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionMissingUrl=Unable to perform {0}, no URL provided.
ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.transactionInvalidUrl=Unable to perform {0}, URL provided is invalid: {1}
ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao.noSystemOrTypeHistoryForPartitionAwareServer=Type- and Server- level history operation not supported on partitioned server
ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.cantValidateWithNoResource=No resource supplied for $validate operation (resource is required unless mode is \"delete\")
ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao.deleteBlockedBecauseDisabled=Resource deletion is not permitted on this server
@ -122,6 +121,9 @@ ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoConceptMapR4.matchesFound=Matches found!
ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoConceptMapR4.noMatchesFound=No matches found!
ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoSearchParameterR4.invalidSearchParamExpression=The expression "{0}" can not be evaluated and may be invalid: {1}
ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderToken.textModifierDisabledForSearchParam=The :text modifier is disabled for this search parameter
ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderToken.textModifierDisabledForServer=The :text modifier is disabled on this server
ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor.successMsg=Cascaded delete to {0} resources: {1}
ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor.noParam=Note that cascading deletes are not active for this request. You can enable cascading deletes by using the "_cascade=delete" URL parameter.
@ -141,9 +143,9 @@ ca.uhn.fhir.jpa.util.jsonpatch.JsonPatchUtils.failedToApplyPatch=Failed to apply
ca.uhn.fhir.jpa.graphql.JpaStorageServices.invalidGraphqlArgument=Unknown GraphQL argument "{0}". Valid GraphQL arguments for this type are: {1}
ca.uhn.fhir.jpa.partition.RequestPartitionHelperService.blacklistedResourceTypeForPartitioning=Resource type {0} can not be partitioned
ca.uhn.fhir.jpa.partition.RequestPartitionHelperService.unknownPartitionId=Unknown partition ID: {0}
ca.uhn.fhir.jpa.partition.RequestPartitionHelperService.unknownPartitionName=Unknown partition name: {0}
ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc.blacklistedResourceTypeForPartitioning=Resource type {0} can not be partitioned
ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc.unknownPartitionId=Unknown partition ID: {0}
ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc.unknownPartitionName=Unknown partition name: {0}
ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderReference.invalidTargetTypeForChain=Resource type "{0}" is not a valid target type for reference search parameter: {1}
@ -151,6 +153,7 @@ ca.uhn.fhir.jpa.dao.predicate.PredicateBuilderReference.invalidResourceType=Inva
ca.uhn.fhir.jpa.dao.index.IdHelperService.nonUniqueForcedId=Non-unique ID specified, can not process request
ca.uhn.fhir.jpa.partition.PartitionLookupSvcImpl.noIdSupplied=No Partition ID supplied
ca.uhn.fhir.jpa.partition.PartitionLookupSvcImpl.missingPartitionIdOrName=Partition must have an ID and a Name
ca.uhn.fhir.jpa.partition.PartitionLookupSvcImpl.cantCreatePartition0=Can not create a partition with ID 0 (this is a reserved value)
ca.uhn.fhir.jpa.partition.PartitionLookupSvcImpl.unknownPartitionId=No partition exists with ID {0}
@ -160,3 +163,5 @@ ca.uhn.fhir.jpa.partition.PartitionLookupSvcImpl.cantDeleteDefaultPartition=Can
ca.uhn.fhir.jpa.partition.PartitionLookupSvcImpl.cantRenameDefaultPartition=Can not rename default partition
ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor.unknownTenantName=Unknown tenant: {0}
ca.uhn.fhir.jpa.dao.HistoryBuilder.noSystemOrTypeHistoryForPartitionAwareServer=Type- and Server- level history operation not supported across partitions on partitioned server

View File

@ -0,0 +1,24 @@
package ca.uhn.fhir.interceptor.model;
import org.junit.Test;
import java.time.LocalDate;
import static org.junit.Assert.*;
public class RequestPartitionIdTest {
@Test
public void testHashCode() {
assertEquals(31860737, RequestPartitionId.allPartitions().hashCode());
}
@Test
public void testEquals() {
assertEquals(RequestPartitionId.fromPartitionId(123, LocalDate.of(2020,1,1)), RequestPartitionId.fromPartitionId(123, LocalDate.of(2020,1,1)));
assertNotEquals(RequestPartitionId.fromPartitionId(123, LocalDate.of(2020,1,1)), null);
assertNotEquals(RequestPartitionId.fromPartitionId(123, LocalDate.of(2020,1,1)), "123");
}
}

View File

@ -0,0 +1,61 @@
package ca.uhn.fhir.util;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.junit.Test;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import static org.junit.Assert.*;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
public class ClasspathUtilTest {
@Test
public void testLoadResourceNotFound() {
try {
ClasspathUtil.loadResource("/FOOOOOO");
} catch (InternalErrorException e) {
assertEquals("Unable to find classpath resource: /FOOOOOO", e.getMessage());
}
}
@Test
public void testLoadResourceAsStreamNotFound() {
try {
ClasspathUtil.loadResourceAsStream("/FOOOOOO");
} catch (InternalErrorException e) {
assertEquals("Unable to find classpath resource: /FOOOOOO", e.getMessage());
}
}
/**
* Should not throw any exception
*/
@Test
public void testClose_Null() {
ClasspathUtil.close(null);
}
/**
* Should not throw any exception
*/
@Test
public void testClose_Ok() {
ClasspathUtil.close(new ByteArrayInputStream(new byte[]{0,1,2}));
}
/**
* Should not throw any exception
*/
@Test
public void testClose_ThrowException() throws IOException {
InputStream is = mock(InputStream.class);
doThrow(new IOException("FOO")).when(is).close();
ClasspathUtil.close(is);
}
}

View File

@ -16,6 +16,12 @@
<Encoding>utf-8</Encoding>
</appender>
<!-- HAPI HL7v2 App Module -->
<logger name="ca.uhn.hl7v2.app" additivity="false" level="debug">
<appender-ref ref="STDOUT" />
<appender-ref ref="FILE" />
</logger>
<logger name="ca.uhn.fhir" additivity="false" level="debug">
<appender-ref ref="STDOUT" />
<appender-ref ref="FILE" />

View File

@ -26,6 +26,7 @@ import java.util.Map;
*/
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.client.api.BaseHttpRequest;
import ca.uhn.fhir.rest.client.api.IHttpRequest;
import ca.uhn.fhir.rest.client.api.IHttpResponse;
import ca.uhn.fhir.util.StopWatch;
@ -39,7 +40,7 @@ import okhttp3.RequestBody;
*
* @author Matthew Clarke | matthew.clarke@orionhealth.com | Orion Health
*/
public class OkHttpRestfulRequest implements IHttpRequest {
public class OkHttpRestfulRequest extends BaseHttpRequest implements IHttpRequest {
private final Request.Builder myRequestBuilder;
private Factory myClient;

View File

@ -20,6 +20,7 @@ package ca.uhn.fhir.rest.client.apache;
* #L%
*/
import ca.uhn.fhir.rest.client.api.BaseHttpRequest;
import ca.uhn.fhir.rest.client.api.IHttpRequest;
import ca.uhn.fhir.rest.client.api.IHttpResponse;
import ca.uhn.fhir.util.StopWatch;
@ -36,7 +37,11 @@ import org.apache.http.entity.ContentType;
import java.io.IOException;
import java.net.URI;
import java.nio.charset.Charset;
import java.util.*;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
* A Http Request based on Apache. This is an adapter around the class
@ -44,7 +49,7 @@ import java.util.*;
*
* @author Peter Van Houte | peter.vanhoute@agfa.com | Agfa Healthcare
*/
public class ApacheHttpRequest implements IHttpRequest {
public class ApacheHttpRequest extends BaseHttpRequest implements IHttpRequest {
private HttpClient myClient;
private HttpRequestBase myRequest;
@ -112,13 +117,13 @@ public class ApacheHttpRequest implements IHttpRequest {
}
@Override
public void setUri(String theUrl) {
myRequest.setURI(URI.create(theUrl));
public String getUri() {
return myRequest.getURI().toString();
}
@Override
public String getUri() {
return myRequest.getURI().toString();
public void setUri(String theUrl) {
myRequest.setURI(URI.create(theUrl));
}
@Override

View File

@ -20,7 +20,13 @@ package ca.uhn.fhir.rest.client.impl;
* #L%
*/
import ca.uhn.fhir.context.*;
import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.IRuntimeDatatypeDefinition;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.Include;
@ -30,16 +36,92 @@ import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.model.primitive.UriDt;
import ca.uhn.fhir.model.valueset.BundleEntryTransactionMethodEnum;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.api.CacheControlDirective;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.DeleteCascadeModeEnum;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.PatchTypeEnum;
import ca.uhn.fhir.rest.api.PreferReturnEnum;
import ca.uhn.fhir.rest.api.SearchStyleEnum;
import ca.uhn.fhir.rest.api.SearchTotalModeEnum;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.SummaryEnum;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.api.IHttpClient;
import ca.uhn.fhir.rest.client.api.IHttpRequest;
import ca.uhn.fhir.rest.client.api.UrlSourceEnum;
import ca.uhn.fhir.rest.client.exceptions.NonFhirResponseException;
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import ca.uhn.fhir.rest.client.method.*;
import ca.uhn.fhir.rest.gclient.*;
import ca.uhn.fhir.rest.client.method.DeleteMethodBinding;
import ca.uhn.fhir.rest.client.method.HistoryMethodBinding;
import ca.uhn.fhir.rest.client.method.HttpDeleteClientInvocation;
import ca.uhn.fhir.rest.client.method.HttpGetClientInvocation;
import ca.uhn.fhir.rest.client.method.HttpSimpleGetClientInvocation;
import ca.uhn.fhir.rest.client.method.IClientResponseHandler;
import ca.uhn.fhir.rest.client.method.MethodUtil;
import ca.uhn.fhir.rest.client.method.OperationMethodBinding;
import ca.uhn.fhir.rest.client.method.ReadMethodBinding;
import ca.uhn.fhir.rest.client.method.SearchMethodBinding;
import ca.uhn.fhir.rest.client.method.SortParameter;
import ca.uhn.fhir.rest.client.method.TransactionMethodBinding;
import ca.uhn.fhir.rest.client.method.ValidateMethodBindingDstu2Plus;
import ca.uhn.fhir.rest.gclient.IBaseQuery;
import ca.uhn.fhir.rest.gclient.IClientExecutable;
import ca.uhn.fhir.rest.gclient.ICreate;
import ca.uhn.fhir.rest.gclient.ICreateTyped;
import ca.uhn.fhir.rest.gclient.ICreateWithQuery;
import ca.uhn.fhir.rest.gclient.ICreateWithQueryTyped;
import ca.uhn.fhir.rest.gclient.ICriterion;
import ca.uhn.fhir.rest.gclient.ICriterionInternal;
import ca.uhn.fhir.rest.gclient.IDelete;
import ca.uhn.fhir.rest.gclient.IDeleteTyped;
import ca.uhn.fhir.rest.gclient.IDeleteWithQuery;
import ca.uhn.fhir.rest.gclient.IDeleteWithQueryTyped;
import ca.uhn.fhir.rest.gclient.IFetchConformanceTyped;
import ca.uhn.fhir.rest.gclient.IFetchConformanceUntyped;
import ca.uhn.fhir.rest.gclient.IGetPage;
import ca.uhn.fhir.rest.gclient.IGetPageTyped;
import ca.uhn.fhir.rest.gclient.IGetPageUntyped;
import ca.uhn.fhir.rest.gclient.IHistory;
import ca.uhn.fhir.rest.gclient.IHistoryTyped;
import ca.uhn.fhir.rest.gclient.IHistoryUntyped;
import ca.uhn.fhir.rest.gclient.IMeta;
import ca.uhn.fhir.rest.gclient.IMetaAddOrDeleteSourced;
import ca.uhn.fhir.rest.gclient.IMetaAddOrDeleteUnsourced;
import ca.uhn.fhir.rest.gclient.IMetaGetUnsourced;
import ca.uhn.fhir.rest.gclient.IOperation;
import ca.uhn.fhir.rest.gclient.IOperationProcessMsg;
import ca.uhn.fhir.rest.gclient.IOperationProcessMsgMode;
import ca.uhn.fhir.rest.gclient.IOperationUnnamed;
import ca.uhn.fhir.rest.gclient.IOperationUntyped;
import ca.uhn.fhir.rest.gclient.IOperationUntypedWithInput;
import ca.uhn.fhir.rest.gclient.IOperationUntypedWithInputAndPartialOutput;
import ca.uhn.fhir.rest.gclient.IParam;
import ca.uhn.fhir.rest.gclient.IPatch;
import ca.uhn.fhir.rest.gclient.IPatchExecutable;
import ca.uhn.fhir.rest.gclient.IPatchWithBody;
import ca.uhn.fhir.rest.gclient.IPatchWithQuery;
import ca.uhn.fhir.rest.gclient.IPatchWithQueryTyped;
import ca.uhn.fhir.rest.gclient.IQuery;
import ca.uhn.fhir.rest.gclient.IRead;
import ca.uhn.fhir.rest.gclient.IReadExecutable;
import ca.uhn.fhir.rest.gclient.IReadIfNoneMatch;
import ca.uhn.fhir.rest.gclient.IReadTyped;
import ca.uhn.fhir.rest.gclient.ISort;
import ca.uhn.fhir.rest.gclient.ITransaction;
import ca.uhn.fhir.rest.gclient.ITransactionTyped;
import ca.uhn.fhir.rest.gclient.IUntypedQuery;
import ca.uhn.fhir.rest.gclient.IUpdate;
import ca.uhn.fhir.rest.gclient.IUpdateExecutable;
import ca.uhn.fhir.rest.gclient.IUpdateTyped;
import ca.uhn.fhir.rest.gclient.IUpdateWithQuery;
import ca.uhn.fhir.rest.gclient.IUpdateWithQueryTyped;
import ca.uhn.fhir.rest.gclient.IValidate;
import ca.uhn.fhir.rest.gclient.IValidateUntyped;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.TokenParam;
@ -53,14 +135,38 @@ import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.*;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseConformance;
import org.hl7.fhir.instance.model.api.IBaseDatatype;
import org.hl7.fhir.instance.model.api.IBaseMetaType;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import java.io.IOException;
import java.io.InputStream;
import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import static org.apache.commons.lang3.StringUtils.*;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
* @author James Agnew
@ -749,6 +855,7 @@ public class GenericClient extends BaseClient implements IGenericClient {
IClientResponseHandler binding;
binding = new ResourceResponseHandler(myBundleType, getPreferResponseTypes());
HttpSimpleGetClientInvocation invocation = new HttpSimpleGetClientInvocation(myContext, myUrl);
invocation.setUrlSource(UrlSourceEnum.EXPLICIT);
Map<String, List<String>> params = null;
return invoke(params, binding, invocation);
@ -1838,7 +1945,7 @@ public class GenericClient extends BaseClient implements IGenericClient {
BaseHttpClientInvocation invocation;
if (mySearchUrl != null) {
invocation = SearchMethodBinding.createSearchInvocation(myContext, mySearchUrl, params);
invocation = SearchMethodBinding.createSearchInvocation(myContext, mySearchUrl, UrlSourceEnum.EXPLICIT, params);
} else {
invocation = SearchMethodBinding.createSearchInvocation(myContext, myResourceName, params, resourceId, myCompartmentName, mySearchStyle);
}

View File

@ -24,6 +24,7 @@ import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.client.api.IHttpRequest;
import ca.uhn.fhir.rest.client.api.IRestfulClient;
import ca.uhn.fhir.rest.client.api.UrlSourceEnum;
import org.apache.commons.lang3.Validate;
import static org.apache.commons.lang3.StringUtils.isBlank;
@ -81,6 +82,10 @@ public class UrlTenantSelectionInterceptor {
Validate.isTrue(requestUri.startsWith(serverBase), "Request URI %s does not start with server base %s", requestUri, serverBase);
if (theRequest.getUrlSource() == UrlSourceEnum.EXPLICIT) {
return;
}
String newUri = serverBase + "/" + tenantId + requestUri.substring(serverBase.length());
theRequest.setUri(newUri);
}
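For context, a client-side sketch showing where this interceptor sits (assuming the tenant-ID constructor; the base URL and tenant ID are invented, and running it would need a reachable server):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.interceptor.UrlTenantSelectionInterceptor;
import org.hl7.fhir.r4.model.Bundle;

public class TenantClientExample {
   public static void main(String[] args) {
      IGenericClient client = FhirContext.forR4().newRestfulGenericClient("http://example.com/fhir");
      // Generated request URLs get "/TENANT-A" inserted after the server base;
      // explicitly supplied URLs (e.g. paging links) are left untouched per the check above.
      client.registerInterceptor(new UrlTenantSelectionInterceptor("TENANT-A"));

      Bundle results = client.search().forResource("Patient").returnBundle(Bundle.class).execute();
      System.out.println(results.getTotal());
   }
}
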

View File

@ -24,6 +24,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.client.api.IHttpRequest;
import ca.uhn.fhir.rest.client.api.UrlSourceEnum;
import ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation;
import ca.uhn.fhir.util.UrlUtil;
import org.apache.commons.lang3.StringUtils;
@ -41,17 +42,24 @@ public class HttpGetClientInvocation extends BaseHttpClientInvocation {
private final Map<String, List<String>> myParameters;
private final String myUrlPath;
private final UrlSourceEnum myUrlSource;
public HttpGetClientInvocation(FhirContext theContext, Map<String, List<String>> theParameters, String... theUrlFragments) {
this(theContext, theParameters, UrlSourceEnum.GENERATED, theUrlFragments);
}
public HttpGetClientInvocation(FhirContext theContext, Map<String, List<String>> theParameters, UrlSourceEnum theUrlSource, String... theUrlFragments) {
super(theContext);
myParameters = theParameters;
myUrlPath = StringUtils.join(theUrlFragments, '/');
myUrlSource = theUrlSource;
}
public HttpGetClientInvocation(FhirContext theContext, String theUrlPath) {
super(theContext);
myParameters = new HashMap<>();
myUrlPath = theUrlPath;
myUrlSource = UrlSourceEnum.GENERATED;
}
@ -95,7 +103,10 @@ public class HttpGetClientInvocation extends BaseHttpClientInvocation {
appendExtraParamsWithQuestionMark(theExtraParams, b, first);
return super.createHttpRequest(b.toString(), theEncoding, RequestTypeEnum.GET);
IHttpRequest retVal = super.createHttpRequest(b.toString(), theEncoding, RequestTypeEnum.GET);
retVal.setUrlSource(myUrlSource);
return retVal;
}
public Map<String, List<String>> getParameters() {

View File

@ -27,11 +27,13 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.client.api.IHttpRequest;
import ca.uhn.fhir.rest.client.api.UrlSourceEnum;
import ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation;
public class HttpSimpleGetClientInvocation extends BaseHttpClientInvocation {
private final String myUrl;
private UrlSourceEnum myUrlSource = UrlSourceEnum.GENERATED;
public HttpSimpleGetClientInvocation(FhirContext theContext, String theUrlPath) {
super(theContext);
@ -40,7 +42,12 @@ public class HttpSimpleGetClientInvocation extends BaseHttpClientInvocation {
@Override
public IHttpRequest asHttpRequest(String theUrlBase, Map<String, List<String>> theExtraParams, EncodingEnum theEncoding, Boolean thePrettyPrint) {
return createHttpRequest(myUrl, theEncoding, RequestTypeEnum.GET);
IHttpRequest retVal = createHttpRequest(myUrl, theEncoding, RequestTypeEnum.GET);
retVal.setUrlSource(myUrlSource);
return retVal;
}
public void setUrlSource(UrlSourceEnum theUrlSource) {
myUrlSource = theUrlSource;
}
}

View File

@ -19,27 +19,33 @@ package ca.uhn.fhir.rest.client.method;
* limitations under the License.
* #L%
*/
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.lang.reflect.Method;
import java.util.*;
import java.util.Map.Entry;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.model.valueset.BundleTypeEnum;
import ca.uhn.fhir.rest.annotation.Search;
import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.SearchStyleEnum;
import ca.uhn.fhir.rest.client.api.UrlSourceEnum;
import ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class SearchMethodBinding extends BaseResourceReturningMethodBinding {
private String myCompartmentName;
@ -157,8 +163,8 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding {
return getMethod().toString();
}
public static BaseHttpClientInvocation createSearchInvocation(FhirContext theContext, String theSearchUrl, Map<String, List<String>> theParams) {
return new HttpGetClientInvocation(theContext, theParams, theSearchUrl);
public static BaseHttpClientInvocation createSearchInvocation(FhirContext theContext, String theSearchUrl, UrlSourceEnum theUrlSource, Map<String, List<String>> theParams) {
return new HttpGetClientInvocation(theContext, theParams, theUrlSource, theSearchUrl);
}

View File

@ -34,7 +34,8 @@ import ca.uhn.fhir.rest.client.interceptor.BasicAuthInterceptor;
import ca.uhn.fhir.rest.client.interceptor.BearerTokenAuthInterceptor;
import ca.uhn.fhir.rest.client.interceptor.CookieInterceptor;
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import org.hl7.fhir.r4.model.*;
import ca.uhn.fhir.rest.client.interceptor.UrlTenantSelectionInterceptor;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Patient;
public class ClientExamples {
@ -60,6 +61,29 @@ public class ClientExamples {
// END SNIPPET: proxy
}
public void tenantId() {
// START SNIPPET: tenantId
FhirContext ctx = FhirContext.forR4();
// Create the client
IGenericClient genericClient = ctx.newRestfulGenericClient("http://localhost:9999/fhir");
// Register the interceptor
UrlTenantSelectionInterceptor tenantSelection = new UrlTenantSelectionInterceptor();
genericClient.registerInterceptor(tenantSelection);
// Read from tenant A
tenantSelection.setTenantId("TENANT-A");
Patient patientA = genericClient.read().resource(Patient.class).withId("123").execute();
// Read from tenant B
tenantSelection.setTenantId("TENANT-B");
Patient patientB = genericClient.read().resource(Patient.class).withId("456").execute();
// END SNIPPET: tenantId
}
@SuppressWarnings("unused")
public void processMessage() {
// START SNIPPET: processMessage

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 1499
title: "When performing a search with a DateParam that has DAY precision, rely on new ordinal date field for comparison
instead of attempting to find oldest and newest instant that could be valid."

View File

@ -0,0 +1,5 @@
---
type: add
issue: 1710
title: The classes BaseOrListParam and BaseParam now have public visibility in order to make it easier to
create more generic APIs. Thanks to GitHub user @ibacher for the pull request!

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 1734
title: The Pointcut JavaDoc had an incorrect link from one pointcut to another and has been fixed. Thanks
to Bert Roos for the pull request!

View File

@ -0,0 +1,5 @@
---
type: add
issue: 1749
title: A new constructor has been added to RestfulServer that accepts an InterceptorService. Thanks to gematik FuE for the
pull request!

View File

@ -0,0 +1,6 @@
---
type: add
issue: 1788
title: "The ApacheProxyAddressStrategy has been improved to add support for additional proxy headers inclusing
`X-Forwarded-Host`, `X-Forwarded-Proto`, `X-Forwarded-Port`, and `X-Forwarded-Prefix`. Thanks to Thomas Papke
for the pull request!"

View File

@ -0,0 +1,5 @@
---
type: add
issue: 1812
title: The JAX-RS server will now scan and serve ResourceProvider methods defined in super-classes as well. Thanks
to Zhe Wang for the pull request!

View File

@ -0,0 +1,7 @@
---
type: perf
issue: 1813
title: History operations in the JPA server have been significantly optimized to reduce the number of SQL SELECT statements,
and to completely eliminate any INSERT statements. This should have a positive effect on heavy users of history
operations. In addition, history operations will no longer write an entry in the query cache (HFJ_SEARCH) table which
should further improve performance of this operation.

View File

@ -0,0 +1,5 @@
---
type: add
issue: 1824
title: Native support for UCUM has been added to the validation stack, meaning that UCUM codes can be validated
at runtime without the need for any external validation.

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 1829
title: "In the JPA server, performing a search where the only search parameter was the `_tag` parameter could cause resources
of the wrong type to be included in search results. This has been corrected."

View File

@ -0,0 +1,5 @@
---
type: add
issue: 1831
title: "Indexing for the :text modifier can now be globally or selectively disabled in the JPA server. This can have a measurable
impact on index sizes and write speed in servers with large numbers of token indexes."

View File

@ -9,6 +9,7 @@
<li>Hibernate Validator (JPA): 5.4.2.Final -&gt; 6.1.3.Final</li>
<li>Guava (JPA): 28.0 -&gt; 28.2</li>
<li>Spring Boot (Boot): 2.2.0.RELEASE -&gt; 2.2.6.RELEASE</li>
<li>FlywayDB (JPA) 6.1.0 -&gt; 6.4.1</li>
</ul>"
- item:
issue: "1583"
@ -57,7 +58,7 @@
issue: "1807"
type: "change"
title: "**New Feature**:
A new feature has been added to the JPA server called **[Partitioning](/hapi-fhir/docs/server_jpa/partitioning.html). This
A new feature has been added to the JPA server called **[Partitioning](/hapi-fhir/docs/server_jpa_partitioning/partitioning.html)**. This
feature allows data to be segregated using a user defined partitioning strategy. This can be leveraged to take
advantage of native RDBMS partition strategies, and also to implement **multitenant servers**.
"

View File

@ -2,79 +2,63 @@
This page contains examples of how to use the client to perform complete tasks. If you have an example you could contribute, we'd love to hear from you!
# Transaction With Placeholder IDs
# Transaction With Conditional Create
The following example shows how to post a transaction with two resources, where one resource contains a reference to the other. A temporary ID (a UUID) is used as an ID to refer to, and this ID will be replaced by the server by a permanent ID.
The following example demonstrates a common scenario: How to create a new piece of data for a Patient (in this case, an Observation) where the identifier of the Patient is known, but the ID is not.
```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ClientTransactionExamples.java|conditional}}
```
In this scenario, we want to look up the Patient record and reference it from the newly created Observation. In the event that no Patient record already exists with the given identifier, a new one will be created and the Observation will reference it. This is known in FHIR as a [Conditional Create](http://hl7.org/fhir/http.html#ccreate).
This code creates the following transaction bundle:
**JSON**:
```json
{
"resourceType": "Bundle",
"type": "transaction",
"entry": [
{
"fullUrl": "urn:uuid:3bc44de3-069d-442d-829b-f3ef68cae371",
"resource": {
"resourceType": "Patient",
"identifier": [
{
"system": "http://acme.org/mrns",
"value": "12345"
}
],
"name": [
{
"family": "Jameson",
"given": [
"J",
"Jonah"
]
}
],
"gender": "male"
"entry": [ {
"fullUrl": "urn:uuid:3bc44de3-069d-442d-829b-f3ef68cae371",
"resource": {
"resourceType": "Patient",
"identifier": [ {
"system": "http://acme.org/mrns",
"value": "12345"
} ],
"name": [ {
"family": "Jameson",
"given": [ "J", "Jonah" ]
} ],
"gender": "male"
},
"request": {
"method": "POST",
"url": "Patient",
"ifNoneExist": "identifier=http://acme.org/mrns|12345"
}
}, {
"resource": {
"resourceType": "Observation",
"status": "final",
"code": {
"coding": [ {
"system": "http://loinc.org",
"code": "789-8",
"display": "Erythrocytes [#/volume] in Blood by Automated count"
} ]
},
"request": {
"method": "POST",
"url": "Patient",
"ifNoneExist": "identifier=http://acme.org/mrns|12345"
"subject": {
"reference": "urn:uuid:3bc44de3-069d-442d-829b-f3ef68cae371"
},
"valueQuantity": {
"value": 4.12,
"unit": "10 trillion/L",
"system": "http://unitsofmeasure.org",
"code": "10*12/L"
}
},
{
"resource": {
"resourceType": "Observation",
"status": "final",
"code": {
"coding": [
{
"system": "http://loinc.org",
"code": "789-8",
"display": "Erythrocytes [#/volume] in Blood by Automated count"
}
]
},
"subject": {
"reference": "urn:uuid:3bc44de3-069d-442d-829b-f3ef68cae371"
},
"valueQuantity": {
"value": 4.12,
"unit": "10 trillion/L",
"system": "http://unitsofmeasure.org",
"code": "10*12/L"
}
},
"request": {
"method": "POST",
"url": "Observation"
}
"request": {
"method": "POST",
"url": "Observation"
}
]
} ]
}
```
@ -164,6 +148,12 @@ The server responds with the following response. Note that the ID of the already
</Bundle>
```
To produce this transaction in Java code:
```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ClientTransactionExamples.java|conditional}}
```
# Fetch all Pages of a Bundle
This following example shows how to load all pages of a bundle by fetching each page one-after-the-other and then joining the results.

View File

@ -45,9 +45,14 @@ page.server_jpa.schema=Database Schema
page.server_jpa.configuration=Configuration
page.server_jpa.search=Search
page.server_jpa.performance=Performance
page.server_jpa.partitioning=Partitioning and Multitenancy
page.server_jpa.upgrading=Upgrade Guide
section.server_jpa_partitioning.title=JPA Server: Partitioning and Multitenancy
page.server_jpa_partitioning.partitioning=Partitioning and Multitenancy
page.server_jpa_partitioning.partitioning_management_operations=Partitioning Management Operations
page.server_jpa_partitioning.enabling_in_hapi_fhir=Enabling Partitioning in HAPI FHIR
section.interceptors.title=Interceptors
page.interceptors.interceptors=Interceptors Overview
page.interceptors.client_interceptors=Client Interceptors

View File

@ -82,6 +82,9 @@ When communicating with a server that supports [URL Base Multitenancy](/docs/ser
* [UrlTenantSelectionInterceptor JavaDoc](/apidocs/hapi-fhir-client/ca/uhn/fhir/rest/client/interceptor/UrlTenantSelectionInterceptor.html)
* [UrlTenantSelectionInterceptor Source](https://github.com/jamesagnew/hapi-fhir/blob/master/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/UrlTenantSelectionInterceptor.java)
```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/ClientExamples.java|tenantId}}
```
# Performance: GZip Outgoing Request Bodies

View File

@ -28,7 +28,7 @@ This interceptor will then produce output similar to the following:
# Partitioning: Multitenant Request Partition
If the JPA server has [partitioning](/docs/server_jpa/partitioning.html) enabled, the RequestTenantPartitionInterceptor can be used in combination with a [Tenant Identification Strategy](/docs/server_plain/multitenancy.html) in order to achieve a multitenant solution. See [JPA Server Partitioning](/docs/server_jpa/partitioning.html) for more information on partitioning.
If the JPA server has [partitioning](/docs/server_jpa_partitioning/partitioning.html) enabled, the RequestTenantPartitionInterceptor can be used in combination with a [Tenant Identification Strategy](/docs/server_plain/multitenancy.html) in order to achieve a multitenant solution. See [JPA Server Partitioning](/docs/server_jpa_partitioning/partitioning.html) for more information on partitioning.
* [RequestTenantPartitionInterceptor JavaDoc](/apidocs/hapi-fhir-server/ca/uhn/fhir/rest/server/interceptor/partition/RequestTenantPartitionInterceptor.html)
* [RequestTenantPartitionInterceptor Source](https://github.com/jamesagnew/hapi-fhir/blob/master/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/partition/RequestTenantPartitionInterceptor.java)
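A minimal registration sketch (assuming a plain `RestfulServer`; the URL-path tenant identification strategy shown here is one option from the plain-server multitenancy docs):
```java
RestfulServer server = new RestfulServer(FhirContext.forR4());

// Resolve the tenant ID from the first path segment of the request URL
server.setTenantIdentificationStrategy(new UrlPathTenantIdentificationStrategy());

// Use the resolved tenant ID as the request partition
server.registerInterceptor(new RequestTenantPartitionInterceptor());
```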

View File

@ -9,3 +9,27 @@ On servers where a large amount of data will be ingested, the following consider
* Optimize your database thread pool count and HTTP client thread count: Every environment will have a different optimal setting for the number of concurrent writes that are permitted, and the maximum number of database connections allowed.
* Disable deletes: If the JPA server is configured to have the FHIR delete operation disabled, it is able to skip some resource reference deletion checks during resource creation, which can have a measurable improvement to performance over large datasets.
# Disabling :text Indexing
On servers storing large numbers of Codings and CodeableConcepts (as well as any other token SearchParameter target where the `:text` modifier is supported), the indexes required to support the `:text` modifier can consume a large amount of index space and have a measurable impact on write times.
This modifier can be disabled globally by using the ModelConfig#setSuppressStringIndexingInTokens setting.
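For example, a minimal sketch of disabling it globally at startup (assuming your configuration code has access to the JPA server's `DaoConfig`; the wiring shown here is illustrative):
```java
DaoConfig daoConfig = new DaoConfig();
// Suppress :text indexing for all token search parameters
daoConfig.getModelConfig().setSuppressStringIndexingInTokens(true);
```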
It can also be disabled at a more granular level (or selectively re-enabled if it is disabled globally) by using an extension on individual SearchParameter resources. For example, the following SearchParameter disables text indexing on the `Observation:code` parameter:
```json
{
"resourceType": "SearchParameter",
"id": "observation-code",
"extension": [ {
"url": "http://hapifhir.io/fhir/StructureDefinition/searchparameter-token-suppress-text-index",
"valueBoolean": true
} ],
"status": "active",
"code": "code",
"base": [ "Observation" ],
"type": "token",
"expression": "Observation.code"
}
```

View File

@ -39,7 +39,7 @@ The HFJ_RESOURCE table indicates a single resource of any type in the database.
<td>Integer</td>
<td>Nullable</td>
<td>
This is the optional partition ID, if the resource is in a partition. See <a href="./partitioning.html">Partitioning</a>.
This is the optional partition ID, if the resource is in a partition. See <a href="/hapi-fhir/docs/server_jpa_partitioning/partitioning.html">Partitioning</a>.
</td>
</tr>
<tr>
@ -48,7 +48,7 @@ The HFJ_RESOURCE table indicates a single resource of any type in the database.
<td>Timestamp</td>
<td>Nullable</td>
<td>
This is the optional partition date, if the resource is in a partition. See <a href="./partitioning.html">Partitioning</a>.
This is the optional partition date, if the resource is in a partition. See <a href="/hapi-fhir/docs/server_jpa_partitioning/partitioning.html">Partitioning</a>.
</td>
</tr>
<tr>
@ -154,7 +154,7 @@ The complete raw contents of the resource is stored in the `RES_TEXT` column, us
<td>Integer</td>
<td>Nullable</td>
<td>
This is the optional partition ID, if the resource is in a partition. See <a href="./partitioning.html">Partitioning</a>.
This is the optional partition ID, if the resource is in a partition. See <a href="/hapi-fhir/docs/server_jpa_partitioning/partitioning.html">Partitioning</a>.
</td>
</tr>
<tr>
@ -163,7 +163,7 @@ The complete raw contents of the resource is stored in the `RES_TEXT` column, us
<td>Timestamp</td>
<td>Nullable</td>
<td>
This is the optional partition date, if the resource is in a partition. See <a href="./partitioning.html">Partitioning</a>.
This is the optional partition date, if the resource is in a partition. See <a href="/hapi-fhir/docs/server_jpa_partitioning/partitioning.html">Partitioning</a>.
</td>
</tr>
<tr>
@ -263,7 +263,7 @@ If the server has been configured with a [Resource Server ID Strategy](/apidocs/
<td>Integer</td>
<td>Nullable</td>
<td>
This is the optional partition ID, if the resource is in a partition. See <a href="./partitioning.html">Partitioning</a>.
This is the optional partition ID, if the resource is in a partition. See <a href="/hapi-fhir/docs/server_jpa_partitioning/partitioning.html">Partitioning</a>.
</td>
</tr>
<tr>
@ -272,7 +272,7 @@ If the server has been configured with a [Resource Server ID Strategy](/apidocs/
<td>Timestamp</td>
<td>Nullable</td>
<td>
This is the optional partition date, if the resource is in a partition. See <a href="./partitioning.html">Partitioning</a>.
This is the optional partition date, if the resource is in a partition. See <a href="/hapi-fhir/docs/server_jpa_partitioning/partitioning.html">Partitioning</a>.
</td>
</tr>
<tr>
@ -332,7 +332,7 @@ When a resource is created or updated, it is indexed for searching. Any search p
<td>Integer</td>
<td>Nullable</td>
<td>
This is the optional partition ID, if the resource is in a partition. See <a href="./partitioning.html">Partitioning</a>.
This is the optional partition ID, if the resource is in a partition. See <a href="/hapi-fhir/docs/server_jpa_partitioning/partitioning.html">Partitioning</a>.
Note that the partition indicated by the <b>PARTITION_ID</b> and <b>PARTITION_DATE</b> columns refers to the partition
of the <i>SOURCE</i> resource, and not necessarily the <i>TARGET</i>.
</td>
@ -343,7 +343,7 @@ When a resource is created or updated, it is indexed for searching. Any search p
<td>Timestamp</td>
<td>Nullable</td>
<td>
This is the optional partition date, if the resource is in a partition. See <a href="./partitioning.html">Partitioning</a>.
This is the optional partition date, if the resource is in a partition. See <a href="/hapi-fhir/docs/server_jpa_partitioning/partitioning.html">Partitioning</a>.
Note that the partition indicated by the <b>PARTITION_ID</b> and <b>PARTITION_DATE</b> columns refers to the partition
of the <i>SOURCE</i> resource, and not necessarily the <i>TARGET</i>.
</td>
@ -448,7 +448,7 @@ The following columns are common to **all HFJ_SPIDX_xxx tables**.
<td>Integer</td>
<td>Nullable</td>
<td>
This is the optional partition ID, if the resource is in a partition. See <a href="./partitioning.html">Partitioning</a>.
This is the optional partition ID, if the resource is in a partition. See <a href="/hapi-fhir/docs/server_jpa_partitioning/partitioning.html">Partitioning</a>.
Note that the partition indicated by the <b>PARTITION_ID</b> and <b>PARTITION_DATE</b> columns refers to the partition
of the <i>SOURCE</i> resource, and not necessarily the <i>TARGET</i>.
</td>
@ -459,7 +459,7 @@ The following columns are common to **all HFJ_SPIDX_xxx tables**.
<td>Timestamp</td>
<td>Nullable</td>
<td>
This is the optional partition date, if the resource is in a partition. See <a href="./partitioning.html">Partitioning</a>.
This is the optional partition date, if the resource is in a partition. See <a href="/hapi-fhir/docs/server_jpa_partitioning/partitioning.html">Partitioning</a>.
Note that the partition indicated by the <b>PARTITION_ID</b> and <b>PARTITION_DATE</b> columns refers to the partition
of the <i>SOURCE</i> resource, and not necessarily the <i>TARGET</i>.
</td>

View File

@ -2,10 +2,6 @@
The HAPI FHIR JPA Server fully implements most [FHIR search](https://www.hl7.org/fhir/search.html) operations for most versions of FHIR. However, there are some known limitations of the current implementation. Here is a partial list of search functionality that is not currently supported in HAPI FHIR:
### Date searches without timestamp
Searching by date with no timestamp currently doesn't match all records it should. See [Issue 1499](https://github.com/jamesagnew/hapi-fhir/issues/1499).
### Chains within _has
Chains within _has are not currently supported for performance reasons. For example, this search is not currently supported

View File

@ -0,0 +1,11 @@
# Enabling Partitioning in HAPI FHIR
Follow these steps to enable partitioning on the server:
The [PartitionSettings](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/config/PartitionSettings.html) bean contains configuration settings related to partitioning within the server. To enable partitioning, the [setPartitioningEnabled(boolean)](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/config/PartitionSettings.html#setPartitioningEnabled(boolean)) setting should be enabled, as in the sketch below.
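A minimal sketch of a Spring bean definition that enables partitioning (the surrounding configuration class and any other wiring are assumed here, not taken from the HAPI FHIR docs):
```java
@Bean
public PartitionSettings partitionSettings() {
   PartitionSettings settings = new PartitionSettings();
   // Turn partitioning on for the JPA server
   settings.setPartitioningEnabled(true);
   return settings;
}
```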
The following settings can be enabled:
* **Include Partition in Search Hashes** ([JavaDoc](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/config/PartitionSettings.html#setIncludePartitionInSearchHashes(boolean))): If this feature is enabled, partition IDs will be factored into [Search Hashes](/hapi-fhir/docs/server_jpa/schema.html#search-hashes). When this flag is not set (as is the default), when a search requests a specific partition, an additional SQL WHERE predicate is added to the query to explicitly request the given partition ID. When this flag is set, this additional WHERE predicate is not necessary since the partition is factored into the hash value being searched on. Setting this flag avoids the need to manually adjust indexes against the HFJ_SPIDX tables. Note that this flag should **not be used in environments where partitioning is being used for security purposes**, since it is possible for a user to reverse engineer false hash collisions.
* **Cross-Partition Reference Mode**: ([JavaDoc](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/config/PartitionSettings.html#setAllowReferencesAcrossPartitions(ca.uhn.fhir.jpa.model.config.PartitionSettings.CrossPartitionReferenceMode))): This setting controls whether resources in one partition should be allowed to create references to resources in other partitions.

View File

@ -26,16 +26,16 @@ Partitioning in HAPI FHIR JPA means that every resource has a partition identity
* **Partition Date**: This is an additional partition discriminator that can be used to implement partitioning strategies using a date axis.
Mappings between the **Partition Name** and the **Partition ID** are maintained using the [Partition Mapping Operations](#partition-mapping-operations).
Mappings between the **Partition Name** and the **Partition ID** are maintained using the [Partition Management Operations](./partitioning_management_operations.html).
## Logical Architecture
At the database level, partitioning involves the use of two dedicated columns to many tables within the HAPI FHIR JPA [database schema](./schema.html):
At the database level, partitioning involves the use of two dedicated columns on many tables within the HAPI FHIR JPA [database schema](/hapi-fhir/docs/server_jpa/schema.html):
* **PARTITION_ID** &ndash; This is an integer indicating the specific partition that a given resource is placed in. This column can also be *NULL*, meaning that the given resource is in the **Default Partition**.
* **PARTITION_DATE** &ndash; This is a date/time column that can be assigned an arbitrary value depending on your use case. Typically, this would be used for use cases where data should be automatically dropped after a certain time period using native database partition drops.
When partitioning is used, these two columns will be populated with the same value for a given resource on all resource-specific tables (this includes [HFJ_RESOURCE](./schema.html#HFJ_RESOURCE) and all tables that have a foreign key relationship to it including [HFJ_RES_VER](./schema.html#HFJ_RES_VER), [HFJ_RESLINK](./schema.html#HFJ_RES_LINK), [HFJ_SPIDX_*](./schema.html#search-indexes), etc.)
When partitioning is used, these two columns will be populated with the same value for a given resource on all resource-specific tables (this includes [HFJ_RESOURCE](/hapi-fhir/docs/server_jpa/schema.html#HFJ_RESOURCE) and all tables that have a foreign key relationship to it including [HFJ_RES_VER](/hapi-fhir/docs/server_jpa/schema.html#HFJ_RES_VER), [HFJ_RESLINK](/hapi-fhir/docs/server_jpa/schema.html#HFJ_RES_LINK), [HFJ_SPIDX_*](/hapi-fhir/docs/server_jpa/schema.html#search-indexes), etc.)
When a new resource is **created**, an [interceptor hook](#partition-interceptors) is invoked to request the partition ID and date to be assigned to the resource.
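As a rough sketch of such a hook (the pointcut and annotation names follow the HAPI FHIR interceptor API; the exact hook signature and the `RequestPartitionId` factory method are assumptions, not taken from this page):
```java
@Interceptor
public class PartitionSelectorInterceptor {

   // Invoked when a resource is about to be created; the returned value names
   // the partition the resource should be stored in. The factory method used
   // below is an assumption.
   @Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE)
   public RequestPartitionId selectPartitionForCreate(IBaseResource theResource) {
      return RequestPartitionId.fromPartitionName("TENANT-A");
   }
}
```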
@ -46,18 +46,6 @@ When a **read operation** is being performed (e.g. a read, search, history, etc.
* The system can be configured to operate as a **multitenant** solution by configuring the partition interceptor to scope all read operations to read data only from the partition that the request has access to.
* The system can be configured to operate with logical segments by configuring the partition interceptor to scope read operations to access all partitions.
# Enabling Partitioning in HAPI FHIR
Follow these steps to enable partitioning on the server:
The [PartitionSettings](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/config/PartitionSettings.html) bean contains configuration settings related to partitioning within the server. To enable partitioning, the [setPartitioningEnabled(boolean)](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/config/PartitionSettings.html#setPartitioningEnabled(boolean)) property should be enabled.
The following settings can be enabled:
* **Include Partition in Search Hashes** ([JavaDoc](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/config/PartitionSettings.html#setIncludePartitionInSearchHashes(boolean))): If this feature is enabled, partition IDs will be factored into [Search Hashes](./schema.html#search-hashes). When this flag is not set (as is the default), when a search requests a specific partition, an additional SQL WHERE predicate is added to the query to explicitly request the given partition ID. When this flag is set, this additional WHERE predicate is not necessary since the partition is factored into the hash value being searched on. Setting this flag avoids the need to manually adjust indexes against the HFJ_SPIDX tables. Note that this flag should **not be used in environments where partitioning is being used for security purposes**, since it is possible for a user to reverse engineer false hash collisions.
* **Cross-Partition Reference Mode**: ([JavaDoc](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/config/PartitionSettings.html#setAllowReferencesAcrossPartitions(ca.uhn.fhir.jpa.model.config.PartitionSettings.CrossPartitionReferenceMode))): This setting controls whether resources in one partition should be allowed to create references to resources in other partitions.
# Partition Interceptors
@ -122,193 +110,6 @@ The following snippet shows a server with this configuration.
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/PartitionExamples.java|multitenantServer}}
```
<a name="partition-mapping-operations"/>
# Partition Mapping Operations
Several operations exist that can be used to manage the existence of partitions. These operations are supplied by a [plain provider](/docs/server_plain/resource_providers.html#plain-providers) called [PartitionManagementProvider](/hapi-fhir/apidocs/hapi-fhir-jpaserver-base/ca/uhn/fhir/jpa/partition/PartitionManagementProvider.html).
Before a partition can be used, it must be registered using these methods.
## Creating a Partition
The `$partition-management-add-partition` operation can be used to create a new partition. This operation takes the following parameters:
<table class="table table-striped table-condensed">
<thead>
<tr>
<th>Name</th>
<th>Type</th>
<th>Cardinality</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td>id</td>
<td>Integer</td>
<td>1..1</td>
<td>
The numeric ID for the partition. This value can be any integer, positive or negative or zero. It must not be a value that has already been used.
</td>
</tr>
<tr>
<td>name</td>
<td>Code</td>
<td>1..1</td>
<td>
A code (string) to assign to the partition.
</td>
</tr>
<tr>
<td>description</td>
<td>String</td>
<td>0..1</td>
<td>
An optional description for the partition.
</td>
</tr>
</tbody>
</table>
### Example
An HTTP POST to the following URL would be used to invoke this operation:
```url
http://example.com/$partition-management-add-partition
```
The following request body could be used:
```json
{
"resourceType": "Parameters",
"parameter": [ {
"name": "id",
"valueInteger": 123
}, {
"name": "name",
"valueCode": "PARTITION-123"
}, {
"name": "description",
"valueString": "a description"
} ]
}
```
## Updating a Partition
The `$partition-management-update-partition` operation can be used to update an existing partition. This operation takes the following parameters:
<table class="table table-striped table-condensed">
<thead>
<tr>
<th>Name</th>
<th>Type</th>
<th>Cardinality</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td>id</td>
<td>Integer</td>
<td>1..1</td>
<td>
The numeric ID for the partition to update. This ID must already exist.
</td>
</tr>
<tr>
<td>name</td>
<td>Code</td>
<td>1..1</td>
<td>
A code (string) to assign to the partition. Note that it is acceptable to change the name of a partition, but this should be done with caution since partition names may be referenced by URLs, caches, etc.
</td>
</tr>
<tr>
<td>description</td>
<td>String</td>
<td>0..1</td>
<td>
An optional description for the partition.
</td>
</tr>
</tbody>
</table>
### Example
An HTTP POST to the following URL would be used to invoke this operation:
```url
http://example.com/$partition-management-add-partition
```
The following request body could be used:
```json
{
"resourceType": "Parameters",
"parameter": [ {
"name": "id",
"valueInteger": 123
}, {
"name": "name",
"valueCode": "PARTITION-123"
}, {
"name": "description",
"valueString": "a description"
} ]
}
```
## Deleting a Partition
The `$partition-management-delete-partition` operation can be used to delete an existing partition. This operation takes the following parameters:
<table class="table table-striped table-condensed">
<thead>
<tr>
<th>Name</th>
<th>Type</th>
<th>Cardinality</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td>id</td>
<td>Integer</td>
<td>1..1</td>
<td>
The numeric ID for the partition to update. This ID must already exist.
</td>
</tr>
</tbody>
</table>
### Example
An HTTP POST to the following URL would be used to invoke this operation:
```url
http://example.com/$partition-management-delete-partition
```
The following request body could be used:
```json
{
"resourceType": "Parameters",
"parameter": [ {
"name": "id",
"valueInteger": 123
} ]
}
```
# Limitations
@ -328,5 +129,7 @@ None of the limitations listed here are considered permanent. Over time the HAPI
* ConceptMap
* **Search Parameters are not partitioned**: There is only one set of SearchParameter resources for the entire system, and any search parameters will apply to resources in all partitions. All SearchParameter resources must be stored in the default partition.
* **Cross-partition History Operations are not supported**: It is not possible to perform a `_history` operation that spans all partitions (`_history` does work when applied to a single partition however).
* **Bulk Operations are not partition aware**: Bulk export operations will export data across all partitions.

View File

@ -0,0 +1,185 @@
# Partition Mapping Operations
Several operations exist that can be used to manage the existence of partitions. These operations are supplied by a [plain provider](/docs/server_plain/resource_providers.html#plain-providers) called [PartitionManagementProvider](/hapi-fhir/apidocs/hapi-fhir-jpaserver-base/ca/uhn/fhir/jpa/partition/PartitionManagementProvider.html).
Before a partition can be used, it must be registered using these methods.
## Creating a Partition
The `$partition-management-create-partition` operation can be used to create a new partition. This operation takes the following parameters:
<table class="table table-striped table-condensed">
<thead>
<tr>
<th>Name</th>
<th>Type</th>
<th>Cardinality</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td>id</td>
<td>Integer</td>
<td>1..1</td>
<td>
The numeric ID for the partition. This value can be any integer: positive, negative, or zero. It must not be a value that has already been used.
</td>
</tr>
<tr>
<td>name</td>
<td>Code</td>
<td>1..1</td>
<td>
A code (string) to assign to the partition.
</td>
</tr>
<tr>
<td>description</td>
<td>String</td>
<td>0..1</td>
<td>
An optional description for the partition.
</td>
</tr>
</tbody>
</table>
### Example
An HTTP POST to the following URL would be used to invoke this operation:
```url
http://example.com/$partition-management-create-partition
```
The following request body could be used:
```json
{
"resourceType": "Parameters",
"parameter": [ {
"name": "id",
"valueInteger": 123
}, {
"name": "name",
"valueCode": "PARTITION-123"
}, {
"name": "description",
"valueString": "a description"
} ]
}
```
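The same request can also be produced with the HAPI FHIR generic client; a hedged sketch using the R4 model classes (client construction omitted):
```java
// Build the Parameters resource described in the table above
Parameters input = new Parameters();
input.addParameter().setName("id").setValue(new IntegerType(123));
input.addParameter().setName("name").setValue(new CodeType("PARTITION-123"));
input.addParameter().setName("description").setValue(new StringType("a description"));

// Invoke the operation against the server base
Parameters output = client
   .operation()
   .onServer()
   .named("$partition-management-create-partition")
   .withParameters(input)
   .execute();
```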
## Updating a Partition
The `$partition-management-update-partition` operation can be used to update an existing partition. This operation takes the following parameters:
<table class="table table-striped table-condensed">
<thead>
<tr>
<th>Name</th>
<th>Type</th>
<th>Cardinality</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td>id</td>
<td>Integer</td>
<td>1..1</td>
<td>
The numeric ID for the partition to update. This ID must already exist.
</td>
</tr>
<tr>
<td>name</td>
<td>Code</td>
<td>1..1</td>
<td>
A code (string) to assign to the partition. Note that it is acceptable to change the name of a partition, but this should be done with caution since partition names may be referenced by URLs, caches, etc.
</td>
</tr>
<tr>
<td>description</td>
<td>String</td>
<td>0..1</td>
<td>
An optional description for the partition.
</td>
</tr>
</tbody>
</table>
### Example
An HTTP POST to the following URL would be used to invoke this operation:
```url
http://example.com/$partition-management-update-partition
```
The following request body could be used:
```json
{
"resourceType": "Parameters",
"parameter": [ {
"name": "id",
"valueInteger": 123
}, {
"name": "name",
"valueCode": "PARTITION-123"
}, {
"name": "description",
"valueString": "a description"
} ]
}
```
## Deleting a Partition
The `$partition-management-delete-partition` operation can be used to delete an existing partition. This operation takes the following parameters:
<table class="table table-striped table-condensed">
<thead>
<tr>
<th>Name</th>
<th>Type</th>
<th>Cardinality</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td>id</td>
<td>Integer</td>
<td>1..1</td>
<td>
The numeric ID for the partition to delete. This ID must already exist.
</td>
</tr>
</tbody>
</table>
### Example
An HTTP POST to the following URL would be used to invoke this operation:
```url
http://example.com/$partition-management-delete-partition
```
The following request body could be used:
```json
{
"resourceType": "Parameters",
"parameter": [ {
"name": "id",
"valueInteger": 123
} ]
}
```

View File

@ -98,6 +98,17 @@ The following table lists vocabulary that is validated by this module:
added in the future, please get in touch if you would like to help.
</td>
</tr>
<tr>
<td>Unified Codes for Units of Measure (UCUM)</td>
<td>
ValueSet: <code><a href="http://hl7.org/fhir/ValueSet/ucum-units">(...)/ValueSet/ucum-units</a></code>
<br/>
CodeSystem: <code>http://unitsofmeasure.org</code>
</td>
<td>
Codes are validated using the UcumEssenceService provided by the <a href="https://github.com/FHIR/Ucum-java">UCUM Java</a> library.
</td>
</tr>
</tbody>
</table>

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.jaxrs.client;
*/
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.client.api.BaseHttpRequest;
import ca.uhn.fhir.rest.client.api.IHttpRequest;
import ca.uhn.fhir.rest.client.api.IHttpResponse;
import ca.uhn.fhir.util.StopWatch;
@ -28,7 +29,11 @@ import ca.uhn.fhir.util.StopWatch;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.core.Response;
import java.util.*;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
* A Http Request based on JaxRs. This is an adapter around the class
@ -36,7 +41,7 @@ import java.util.*;
*
* @author Peter Van Houte | peter.vanhoute@agfa.com | Agfa Healthcare
*/
public class JaxRsHttpRequest implements IHttpRequest {
public class JaxRsHttpRequest extends BaseHttpRequest implements IHttpRequest {
private final Map<String, List<String>> myHeaders = new HashMap<>();
private Invocation.Builder myRequest;

View File

@ -20,17 +20,20 @@ package ca.uhn.fhir.jaxrs.server.util;
* #L%
*/
import java.lang.reflect.Method;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.lang3.StringUtils;
import ca.uhn.fhir.jaxrs.server.AbstractJaxRsProvider;
import ca.uhn.fhir.rest.annotation.Search;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.server.exceptions.NotImplementedOperationException;
import ca.uhn.fhir.rest.server.method.*;
import ca.uhn.fhir.rest.server.method.BaseMethodBinding;
import ca.uhn.fhir.rest.server.method.OperationMethodBinding;
import ca.uhn.fhir.rest.server.method.SearchMethodBinding;
import ca.uhn.fhir.util.ReflectionUtil;
import org.apache.commons.lang3.StringUtils;
import java.lang.reflect.Method;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
/**
* Class that contains the method bindings defined by a ResourceProvider
@ -52,7 +55,9 @@ public class JaxRsMethodBindings {
* @param theProviderClass the class definition containing the operations
*/
public JaxRsMethodBindings(AbstractJaxRsProvider theProvider, Class<? extends AbstractJaxRsProvider> theProviderClass) {
for (final Method m : ReflectionUtil.getDeclaredMethods(theProviderClass)) {
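// Also scan methods declared on the immediate superclass, so that bindings inherited from an abstract provider are registered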
List<Method> declaredMethodsForCurrentProvider = ReflectionUtil.getDeclaredMethods(theProviderClass);
declaredMethodsForCurrentProvider.addAll(ReflectionUtil.getDeclaredMethods(theProviderClass.getSuperclass()));
for (final Method m : declaredMethodsForCurrentProvider) {
final BaseMethodBinding<?> foundMethodBinding = BaseMethodBinding.bindMethod(m, theProvider.getFhirContext(), theProvider);
if (foundMethodBinding == null) {
continue;

View File

@ -0,0 +1,34 @@
package ca.uhn.fhir.jaxrs.server.test;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jaxrs.server.AbstractJaxRsResourceProvider;
import ca.uhn.fhir.rest.annotation.RequiredParam;
import ca.uhn.fhir.rest.annotation.Search;
import ca.uhn.fhir.rest.param.StringParam;
import org.hl7.fhir.r4.model.Patient;
import java.util.List;
/**
* A dummy patient provider exposing a single search method
*/
public abstract class AbstractDummyPatientProvider extends AbstractJaxRsResourceProvider<Patient> {
public AbstractDummyPatientProvider() {
super(FhirContext.forR4());
}
@Override
public abstract String getBaseForServer();
@Search
public List<Patient> search(@RequiredParam(name = Patient.SP_NAME) final StringParam name) {
return null;
}
@Override
public Class<Patient> getResourceType() {
return Patient.class;
}
}

View File

@ -1,15 +1,11 @@
package ca.uhn.fhir.jaxrs.server.test;
import ca.uhn.fhir.jaxrs.server.AbstractJaxRsResourceProvider;
import org.hl7.fhir.r4.model.Patient;
/**
* A dummy patient provider exposing no methods
*/
public class TestJaxRsDummyPatientProviderR4 extends AbstractJaxRsResourceProvider<Patient> {
public class TestJaxRsDummyPatientProviderR4 extends AbstractDummyPatientProvider {
@Override
public Class<Patient> getResourceType() {
return Patient.class;
@Override public String getBaseForServer() {
return "https://fhirserver/fhir/r4";
}
}

View File

@ -0,0 +1,12 @@
package ca.uhn.fhir.jaxrs.server.test;
/**
* A dummy patient provider exposing no methods
*/
public class TestJaxRsDummyPatientProviderR4MimeType extends AbstractDummyPatientProvider {
@Override public String getBaseForServer() {
return "https://fhirserver/fhir";
}
}

View File

@ -0,0 +1,28 @@
package ca.uhn.fhir.jaxrs.server.util;
import ca.uhn.fhir.jaxrs.server.test.AbstractDummyPatientProvider;
import ca.uhn.fhir.jaxrs.server.test.TestJaxRsDummyPatientProviderR4;
import ca.uhn.fhir.jaxrs.server.test.TestJaxRsDummyPatientProviderR4MimeType;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import org.junit.Before;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import static org.junit.Assert.assertEquals;
@FixMethodOrder(MethodSorters.DEFAULT)
public class JaxRsMethodBindingsMimeTypeTest {
@Before
public void setUp() {
JaxRsMethodBindings.getClassBindings().clear();
}
@Test
public void testFindMethodsFor2ProvidersWithMethods() {
assertEquals(AbstractDummyPatientProvider.class, new TestJaxRsDummyPatientProviderR4().getBindings().getBinding(RestOperationTypeEnum.SEARCH_TYPE, "").getMethod().getDeclaringClass());
assertEquals(AbstractDummyPatientProvider.class, new TestJaxRsDummyPatientProviderR4MimeType().getBindings().getBinding(RestOperationTypeEnum.SEARCH_TYPE, "").getMethod().getDeclaringClass());
}
}

View File

@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.api.config;
import ca.uhn.fhir.jpa.api.model.WarmCacheEntry;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.searchparam.SearchParamConstants;
import ca.uhn.fhir.rest.api.SearchTotalModeEnum;
import com.google.common.annotations.VisibleForTesting;
@ -932,6 +933,43 @@ public class DaoConfig {
myModelConfig.setAllowExternalReferences(theAllowExternalReferences);
}
/**
* <p>
* Should searches use the integer fields {@code SP_VALUE_LOW_DATE_ORDINAL} and {@code SP_VALUE_HIGH_DATE_ORDINAL} in
* {@link ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate} when resolving searches where all predicates are using
* precision of {@link ca.uhn.fhir.model.api.TemporalPrecisionEnum#DAY}.
*
* For example, if enabled, the search of {@code Observation?date=2020-02-25} will cause the date to be collapsed down to an
* ordinal {@code 20200225}. It would then be compared against {@link ResourceIndexedSearchParamDate#getValueLowDateOrdinal()}
* and {@link ResourceIndexedSearchParamDate#getValueHighDateOrdinal()}
* </p>
* <p>
* Default is {@literal true} beginning in HAPI FHIR 5.0
* </p>
*
* @since 5.0
*/
public void setUseOrdinalDatesForDayPrecisionSearches(boolean theUseOrdinalDates) {
myModelConfig.setUseOrdinalDatesForDayPrecisionSearches(theUseOrdinalDates);
}
/**
* <p>
* Should searches use the integer fields {@code SP_VALUE_LOW_DATE_ORDINAL} and {@code SP_VALUE_HIGH_DATE_ORDINAL} in
* {@link ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate} when resolving searches where all predicates are using
* precision of {@link ca.uhn.fhir.model.api.TemporalPrecisionEnum#DAY}.
*
* For example, if enabled, the search of {@code Observation?date=2020-02-25} will cause the date to be collapsed down to an
* integer representing the ordinal date {@code 20200225}. It would then be compared against {@link ResourceIndexedSearchParamDate#getValueLowDateOrdinal()}
* and {@link ResourceIndexedSearchParamDate#getValueHighDateOrdinal()}
* </p>
* <p>
* Default is {@literal true} beginning in HAPI FHIR 5.0
* </p>
*
* @since 5.0
*/
public boolean getUseOrdinalDatesForDayPrecisionSearches() {
return myModelConfig.getUseOrdinalDatesForDayPrecisionSearches();
}
/**
* @see #setAllowInlineMatchUrlReferences(boolean)
*/

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.bulk;
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
@ -231,7 +232,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
map.setLastUpdated(new DateRangeParam(job.getSince(), null));
}
IResultIterator resultIterator = sb.createQuery(map, new SearchRuntimeDetails(null, theJobUuid), null, null);
IResultIterator resultIterator = sb.createQuery(map, new SearchRuntimeDetails(null, theJobUuid), null, RequestPartitionId.allPartitions());
storeResultsToFiles(nextCollection, sb, resultIterator, jobResourceCounter, jobStopwatch);
}

View File

@ -5,22 +5,27 @@ import ca.uhn.fhir.i18n.HapiLocalizer;
import ca.uhn.fhir.interceptor.api.IInterceptorService;
import ca.uhn.fhir.interceptor.executor.InterceptorService;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider;
import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor;
import ca.uhn.fhir.jpa.bulk.BulkDataExportProvider;
import ca.uhn.fhir.jpa.bulk.BulkDataExportSvcImpl;
import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.dao.HistoryBuilder;
import ca.uhn.fhir.jpa.dao.HistoryBuilderFactory;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.dao.index.DaoResourceLinkResolver;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.graphql.JpaStorageServices;
import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperService;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.partition.PartitionLookupSvcImpl;
import ca.uhn.fhir.jpa.partition.PartitionManagementProvider;
import ca.uhn.fhir.jpa.partition.RequestPartitionHelperService;
import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.provider.SubscriptionTriggeringProvider;
import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
import ca.uhn.fhir.jpa.sched.AutowiringSpringBeanJobFactory;
@ -44,6 +49,7 @@ import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices;
import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.utilities.graphql.IGraphQLStorageServices;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
@ -64,6 +70,9 @@ import org.springframework.scheduling.concurrent.ConcurrentTaskScheduler;
import org.springframework.scheduling.concurrent.ScheduledExecutorFactoryBean;
import org.springframework.web.socket.config.annotation.WebSocketConfigurer;
import javax.annotation.Nullable;
import java.util.Date;
/*
* #%L
* HAPI FHIR JPA Server
@ -103,9 +112,12 @@ public abstract class BaseConfig {
public static final String JPA_VALIDATION_SUPPORT_CHAIN = "myJpaValidationSupportChain";
public static final String TASK_EXECUTOR_NAME = "hapiJpaTaskExecutor";
public static final String GRAPHQL_PROVIDER_NAME = "myGraphQLProvider";
private static final String HAPI_DEFAULT_SCHEDULER_GROUP = "HAPI";
public static final String PERSISTED_JPA_BUNDLE_PROVIDER = "PersistedJpaBundleProvider";
public static final String PERSISTED_JPA_BUNDLE_PROVIDER_BY_SEARCH = "PersistedJpaBundleProvider_BySearch";
public static final String PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER = "PersistedJpaSearchFirstPageBundleProvider";
private static final String HAPI_DEFAULT_SCHEDULER_GROUP = "HAPI";
public static final String SEARCH_BUILDER = "SearchBuilder";
public static final String HISTORY_BUILDER = "HistoryBuilder";
@Autowired
protected Environment myEnv;
@ -213,8 +225,8 @@ public abstract class BaseConfig {
}
@Bean
public IRequestPartitionHelperService requestPartitionHelperService() {
return new RequestPartitionHelperService();
public IRequestPartitionHelperSvc requestPartitionHelperService() {
return new RequestPartitionHelperSvc();
}
@Bean
@ -291,18 +303,46 @@ public abstract class BaseConfig {
return new PersistedJpaBundleProviderFactory();
}
@Bean(name= PERSISTED_JPA_BUNDLE_PROVIDER)
@Bean(name = PERSISTED_JPA_BUNDLE_PROVIDER)
@Scope("prototype")
public PersistedJpaBundleProvider persistedJpaBundleProvider(RequestDetails theRequest, String theUuid) {
return new PersistedJpaBundleProvider(theRequest, theUuid);
}
@Bean(name= PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER)
@Bean(name = PERSISTED_JPA_BUNDLE_PROVIDER_BY_SEARCH)
@Scope("prototype")
public PersistedJpaBundleProvider persistedJpaBundleProvider(RequestDetails theRequest, Search theSearch) {
return new PersistedJpaBundleProvider(theRequest, theSearch);
}
@Bean(name = PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER)
@Scope("prototype")
public PersistedJpaSearchFirstPageBundleProvider persistedJpaSearchFirstPageBundleProvider(RequestDetails theRequest, Search theSearch, SearchCoordinatorSvcImpl.SearchTask theSearchTask, ISearchBuilder theSearchBuilder) {
return new PersistedJpaSearchFirstPageBundleProvider(theSearch, theSearchTask, theSearchBuilder, theRequest);
}
@Bean
public SearchBuilderFactory searchBuilderFactory() {
return new SearchBuilderFactory();
}
@Bean(name = SEARCH_BUILDER)
@Scope("prototype")
public SearchBuilder persistedJpaSearchFirstPageBundleProvider(IDao theDao, String theResourceName, Class<? extends IBaseResource> theResourceType) {
return new SearchBuilder(theDao, theResourceName, theResourceType);
}
@Bean
public HistoryBuilderFactory historyBuilderFactory() {
return new HistoryBuilderFactory();
}
@Bean(name = HISTORY_BUILDER)
@Scope("prototype")
public HistoryBuilder persistedJpaSearchFirstPageBundleProvider(@Nullable String theResourceType, @Nullable Long theResourceId, @Nullable Date theRangeStartInclusive, @Nullable Date theRangeEndInclusive) {
return new HistoryBuilder(theResourceType, theResourceId, theRangeStartInclusive, theRangeEndInclusive);
}
public static void configureEntityManagerFactory(LocalContainerEntityManagerFactoryBean theFactory, FhirContext theCtx) {
theFactory.setJpaDialect(hibernateJpaDialect(theCtx.getLocalizer()));
// TODO: Looking at moving the lastn entities into jpa.model.entity package. Note that moving the lastn entities may require re-building elasticsearch indexes.

View File

@ -11,6 +11,7 @@ import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao;
@ -26,15 +27,28 @@ import ca.uhn.fhir.jpa.dao.index.DaoSearchParamSynchronizer;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.index.SearchParamWithInlineReferencesExtractor;
import ca.uhn.fhir.jpa.delete.DeleteConflictService;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.entity.ResourceSearchView;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.*;
import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
import ca.uhn.fhir.jpa.model.entity.BaseTag;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory;
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams;
@ -76,7 +90,16 @@ import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.*;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseHasExtensions;
import org.hl7.fhir.instance.model.api.IBaseMetaType;
import org.hl7.fhir.instance.model.api.IBaseReference;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IDomainResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.Bundle.HTTPVerb;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -100,8 +123,19 @@ import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import javax.xml.stream.events.Characters;
import javax.xml.stream.events.XMLEvent;
import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.UUID;
import static org.apache.commons.lang3.StringUtils.defaultIfBlank;
import static org.apache.commons.lang3.StringUtils.defaultString;
@ -164,14 +198,17 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
protected IResourceTableDao myResourceTableDao;
@Autowired
protected IResourceTagDao myResourceTagDao;
@Autowired
protected DeleteConflictService myDeleteConflictService;
@Autowired
protected IInterceptorBroadcaster myInterceptorBroadcaster;
@Autowired
protected DaoRegistry myDaoRegistry;
@Autowired
ExpungeService myExpungeService;
@Autowired
private HistoryBuilderFactory myHistoryBuilderFactory;
@Autowired
private DaoConfig myConfig;
@Autowired
private PlatformTransactionManager myPlatformTransactionManager;
@ -180,8 +217,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
@Autowired
private ISearchParamPresenceSvc mySearchParamPresenceSvc;
@Autowired
protected DaoRegistry myDaoRegistry;
@Autowired
private SearchParamWithInlineReferencesExtractor mySearchParamWithInlineReferencesExtractor;
@Autowired
private DaoSearchParamSynchronizer myDaoSearchParamSynchronizer;
@ -191,6 +226,12 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
private ApplicationContext myApplicationContext;
@Autowired
private PartitionSettings myPartitionSettings;
@Autowired
private RequestPartitionHelperSvc myRequestPartitionHelperSvc;
@Autowired
private PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory;
@Autowired
private IPartitionLookupSvc myPartitionLookupSvc;
@Override
protected IInterceptorBroadcaster getInterceptorBroadcaster() {
@ -384,48 +425,23 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}
}
protected IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive) {
protected IBundleProvider history(RequestDetails theRequest, String theResourceName, Long theId, Date theSince, Date theUntil) {
String resourceName = defaultIfBlank(theResourceName, null);
String resourceName = defaultIfBlank(theResourceType, null);
Search search = new Search();
search.setDeleted(false);
search.setCreated(new Date());
search.setLastUpdated(theSince, theUntil);
search.setLastUpdated(theRangeStartInclusive, theRangeEndInclusive);
search.setUuid(UUID.randomUUID().toString());
search.setResourceType(resourceName);
search.setResourceId(theId);
search.setResourceId(theResourcePid);
search.setSearchType(SearchTypeEnum.HISTORY);
search.setStatus(SearchStatusEnum.FINISHED);
if (theSince != null) {
if (resourceName == null) {
search.setTotalCount(myResourceHistoryTableDao.countForAllResourceTypes(theSince));
} else if (theId == null) {
search.setTotalCount(myResourceHistoryTableDao.countForResourceType(resourceName, theSince));
} else {
search.setTotalCount(myResourceHistoryTableDao.countForResourceInstance(theId, theSince));
}
} else {
if (resourceName == null) {
search.setTotalCount(myResourceHistoryTableDao.countForAllResourceTypes());
} else if (theId == null) {
search.setTotalCount(myResourceHistoryTableDao.countForResourceType(resourceName));
} else {
search.setTotalCount(myResourceHistoryTableDao.countForResourceInstance(theId));
}
}
search = mySearchCacheSvc.save(search);
return myPersistedJpaBundleProviderFactory.newInstance(theRequest, search.getUuid());
return myPersistedJpaBundleProviderFactory.newInstance(theRequest, search);
}
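
The rewritten history() above no longer pre-computes totals with dedicated count queries; it only records what was asked for and hands the Search off to the bundle provider factory, so counting can happen lazily. A rough plain-Java sketch of that deferral follows; HistoryRequest and LazyHistoryBundle are hypothetical stand-ins, not HAPI FHIR classes.

import java.util.Date;
import java.util.UUID;
import java.util.function.Supplier;

// Hypothetical: captures what was asked for; no database work happens here.
class HistoryRequest {
	final String uuid = UUID.randomUUID().toString();
	final String resourceType;   // null = all types
	final Long resourcePid;      // null = all instances
	final Date rangeStart;       // inclusive, nullable
	final Date rangeEnd;         // inclusive, nullable

	HistoryRequest(String theType, Long thePid, Date theStart, Date theEnd) {
		resourceType = theType;
		resourcePid = thePid;
		rangeStart = theStart;
		rangeEnd = theEnd;
	}
}

// Hypothetical: the count query only runs if and when a caller asks for the size.
class LazyHistoryBundle {
	private final Supplier<Long> myCountQuery;
	private Long myCount;

	LazyHistoryBundle(Supplier<Long> theCountQuery) {
		myCountQuery = theCountQuery;
	}

	synchronized long size() {
		if (myCount == null) {
			myCount = myCountQuery.get();
		}
		return myCount;
	}
}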
@Autowired
private PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory;
void incrementId(T theResource, ResourceTable theSavedEntity, IIdType theResourceId) {
String newVersion;
long newVersionLong;
@ -807,7 +823,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
ResourceHistoryTable history = (ResourceHistoryTable) theEntity;
resourceBytes = history.getResource();
resourceEncoding = history.getEncoding();
myTagList = history.getTags();
if (history.isHasTags()) {
myTagList = history.getTags();
} else {
myTagList = Collections.emptyList();
}
version = history.getVersion();
if (history.getProvenance() != null) {
provenanceRequestId = history.getProvenance().getRequestId();
@ -829,7 +849,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}
resourceBytes = history.getResource();
resourceEncoding = history.getEncoding();
myTagList = resource.getTags();
if (resource.isHasTags()) {
myTagList = resource.getTags();
} else {
myTagList = Collections.emptyList();
}
version = history.getVersion();
if (history.getProvenance() != null) {
provenanceRequestId = history.getProvenance().getRequestId();
@ -924,6 +948,17 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}
// 7. Add partition information
if (myPartitionSettings.isPartitioningEnabled()) {
RequestPartitionId partitionId = theEntity.getPartitionId();
if (partitionId != null && partitionId.getPartitionId() != null) {
PartitionEntity persistedPartition = myPartitionLookupSvc.getPartitionById(partitionId.getPartitionId());
retVal.setUserData(Constants.RESOURCE_PARTITION_ID, persistedPartition.toRequestPartitionId());
} else {
retVal.setUserData(Constants.RESOURCE_PARTITION_ID, null);
}
}
return retVal;
}

View File

@ -45,7 +45,7 @@ import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperService;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
@ -145,7 +145,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
private String myResourceName;
private Class<T> myResourceType;
@Autowired
private IRequestPartitionHelperService myRequestPartitionHelperService;
private IRequestPartitionHelperSvc myRequestPartitionHelperService;
@Autowired
private PartitionSettings myPartitionSettings;
@ -216,7 +216,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
theResource.setUserData(JpaConstants.RESOURCE_ID_SERVER_ASSIGNED, Boolean.TRUE);
}
RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequestDetails, theResource);
RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequestDetails, theResource, getResourceName());
return doCreate(theResource, theIfNoneExist, thePerformIndexing, theUpdateTimestamp, theRequestDetails, requestPartitionId);
}
@ -685,11 +685,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Override
public IBundleProvider history(Date theSince, Date theUntil, RequestDetails theRequestDetails) {
if (myPartitionSettings.isPartitioningEnabled()) {
String msg = getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "noSystemOrTypeHistoryForPartitionAwareServer");
throw new MethodNotAllowedException(msg);
}
// Notify interceptors
ActionRequestDetails requestDetails = new ActionRequestDetails(theRequestDetails);
notifyInterceptors(RestOperationTypeEnum.HISTORY_TYPE, requestDetails);
@ -1005,19 +1000,19 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
public BaseHasResource readEntity(IIdType theId, boolean theCheckForForcedId, RequestDetails theRequest) {
validateResourceTypeAndThrowInvalidRequestException(theId);
@Nullable RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequest, getResourceName());
RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequest, getResourceName());
ResourcePersistentId pid = myIdHelperService.resolveResourcePersistentIds(requestPartitionId, getResourceName(), theId.getIdPart());
BaseHasResource entity = myEntityManager.find(ResourceTable.class, pid.getIdAsLong());
// Verify that the resource is for the correct partition
if (requestPartitionId != null) {
if (!requestPartitionId.isAllPartitions()) {
if (requestPartitionId.getPartitionId() == null) {
if (entity.getPartitionId() != null) {
if (entity.getPartitionId().getPartitionId() != null) {
ourLog.debug("Performing a read for PartitionId={} but entity has partition: {}", requestPartitionId, entity.getPartitionId());
entity = null;
}
} else if (entity.getPartitionId() != null) {
if (!entity.getPartitionId().getPartitionId().equals(requestPartitionId.getPartitionId())) {
} else if (entity.getPartitionId().getPartitionId() != null) {
if (!requestPartitionId.getPartitionId().equals(entity.getPartitionId().getPartitionId())) {
ourLog.debug("Performing a read for PartitionId={} but entity has partition: {}", requestPartitionId, entity.getPartitionId());
entity = null;
}
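
The read-time check above distinguishes three partition cases: a request for all partitions skips the check, a request for the default partition requires the entity's partition id to be null, and a request for a named partition requires an exact match. A minimal criteria-API sketch of the same three-way branch, using an illustrative column path rather than the HAPI helpers:

import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.Path;
import javax.persistence.criteria.Predicate;
import java.util.Optional;

final class PartitionPredicates {

	// all partitions     -> no predicate at all
	// default partition  -> PARTITION_ID IS NULL
	// named partition    -> PARTITION_ID = :id
	static Optional<Predicate> forPartition(CriteriaBuilder cb, Path<Integer> partitionIdColumn, boolean isAllPartitions, Integer partitionId) {
		if (isAllPartitions) {
			return Optional.empty();
		}
		if (partitionId == null) {
			return Optional.of(cb.isNull(partitionIdColumn));
		}
		return Optional.of(cb.equal(partitionIdColumn, partitionId));
	}
}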
@ -1314,7 +1309,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
*/
resourceId = theResource.getIdElement();
RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequest, theResource);
RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequest, theResource, getResourceName());
try {
entity = readEntityLatestVersion(resourceId, requestPartitionId);
} catch (ResourceNotFoundException e) {

View File

@ -8,7 +8,6 @@ import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.util.StopWatch;
import org.hl7.fhir.instance.model.api.IBaseResource;
@ -80,11 +79,6 @@ public abstract class BaseHapiFhirSystemDao<T, MT> extends BaseHapiFhirDao<IBase
@Override
public IBundleProvider history(Date theSince, Date theUntil, RequestDetails theRequestDetails) {
if (myPartitionSettings.isPartitioningEnabled()) {
String msg = getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "noSystemOrTypeHistoryForPartitionAwareServer");
throw new MethodNotAllowedException(msg);
}
if (theRequestDetails != null) {
// Notify interceptors
ActionRequestDetails requestDetails = new ActionRequestDetails(theRequestDetails);

View File

@ -27,7 +27,7 @@ import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperService;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.api.Constants;
@ -282,7 +282,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
}
@Autowired
private IRequestPartitionHelperService myRequestPartitionHelperService;
private IRequestPartitionHelperSvc myRequestPartitionHelperService;
@Autowired
private PartitionSettings myPartitionSettings;

View File

@ -0,0 +1,183 @@
package ca.uhn.fhir.jpa.dao;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2020 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.Multimaps;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nullable;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.JoinType;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static ca.uhn.fhir.jpa.dao.SearchBuilder.toPredicateArray;
/**
* The HistoryBuilder is responsible for building history queries
*/
public class HistoryBuilder {
private static final Logger ourLog = LoggerFactory.getLogger(HistoryBuilder.class);
private final String myResourceType;
private final Long myResourceId;
private final Date myRangeStartInclusive;
private final Date myRangeEndInclusive;
@Autowired
protected IInterceptorBroadcaster myInterceptorBroadcaster;
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
protected EntityManager myEntityManager;
@Autowired
private PartitionSettings myPartitionSettings;
@Autowired
private FhirContext myCtx;
@Autowired
private IdHelperService myIdHelperService;
/**
* Constructor
*/
public HistoryBuilder(@Nullable String theResourceType, @Nullable Long theResourceId, @Nullable Date theRangeStartInclusive, @Nullable Date theRangeEndInclusive) {
myResourceType = theResourceType;
myResourceId = theResourceId;
myRangeStartInclusive = theRangeStartInclusive;
myRangeEndInclusive = theRangeEndInclusive;
}
public Long fetchCount(RequestPartitionId thePartitionId) {
CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
CriteriaQuery<Long> criteriaQuery = cb.createQuery(Long.class);
Root<ResourceHistoryTable> from = criteriaQuery.from(ResourceHistoryTable.class);
criteriaQuery.select(cb.count(from));
addPredicatesToQuery(cb, thePartitionId, criteriaQuery, from);
TypedQuery<Long> query = myEntityManager.createQuery(criteriaQuery);
return query.getSingleResult();
}
@SuppressWarnings("OptionalIsPresent")
public List<ResourceHistoryTable> fetchEntities(RequestPartitionId thePartitionId, int theFromIndex, int theToIndex) {
CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
CriteriaQuery<ResourceHistoryTable> criteriaQuery = cb.createQuery(ResourceHistoryTable.class);
Root<ResourceHistoryTable> from = criteriaQuery.from(ResourceHistoryTable.class);
addPredicatesToQuery(cb, thePartitionId, criteriaQuery, from);
from.fetch("myProvenance", JoinType.LEFT);
criteriaQuery.orderBy(cb.desc(from.get("myUpdated")));
TypedQuery<ResourceHistoryTable> query = myEntityManager.createQuery(criteriaQuery);
query.setFirstResult(theFromIndex);
query.setMaxResults(theToIndex - theFromIndex);
List<ResourceHistoryTable> tables = query.getResultList();
if (tables.size() > 0) {
ImmutableListMultimap<Long, ResourceHistoryTable> resourceIdToHistoryEntries = Multimaps.index(tables, ResourceHistoryTable::getResourceId);
Map<Long, Optional<String>> pidToForcedId = myIdHelperService.translatePidsToForcedIds(resourceIdToHistoryEntries.keySet());
ourLog.trace("Translated IDs: {}", pidToForcedId);
for (Long nextResourceId : resourceIdToHistoryEntries.keySet()) {
List<ResourceHistoryTable> historyTables = resourceIdToHistoryEntries.get(nextResourceId);
String resourceId;
Optional<String> forcedId = pidToForcedId.get(nextResourceId);
if (forcedId.isPresent()) {
resourceId = forcedId.get();
} else {
resourceId = nextResourceId.toString();
}
for (ResourceHistoryTable nextHistoryTable : historyTables) {
nextHistoryTable.setTransientForcedId(resourceId);
}
}
}
return tables;
}
private void addPredicatesToQuery(CriteriaBuilder theCriteriaBuilder, RequestPartitionId thePartitionId, CriteriaQuery<?> theQuery, Root<ResourceHistoryTable> theFrom) {
List<Predicate> predicates = new ArrayList<>();
if (!thePartitionId.isAllPartitions()) {
if (thePartitionId.getPartitionId() != null) {
predicates.add(theCriteriaBuilder.equal(theFrom.get("myPartitionIdValue").as(Integer.class), thePartitionId.getPartitionId()));
} else {
predicates.add(theCriteriaBuilder.isNull(theFrom.get("myPartitionIdValue").as(Integer.class)));
}
}
if (myResourceId != null) {
predicates.add(theCriteriaBuilder.equal(theFrom.get("myResourceId"), myResourceId));
} else if (myResourceType != null) {
validateNotSearchingAllPartitions(thePartitionId);
predicates.add(theCriteriaBuilder.equal(theFrom.get("myResourceType"), myResourceType));
} else {
validateNotSearchingAllPartitions(thePartitionId);
}
if (myRangeStartInclusive != null) {
predicates.add(theCriteriaBuilder.greaterThanOrEqualTo(theFrom.get("myUpdated").as(Date.class), myRangeStartInclusive));
}
if (myRangeEndInclusive != null) {
predicates.add(theCriteriaBuilder.lessThanOrEqualTo(theFrom.get("myUpdated").as(Date.class), myRangeEndInclusive));
}
if (predicates.size() > 0) {
theQuery.where(toPredicateArray(predicates));
}
}
private void validateNotSearchingAllPartitions(RequestPartitionId thePartitionId) {
if (myPartitionSettings.isPartitioningEnabled()) {
if (thePartitionId.isAllPartitions()) {
String msg = myCtx.getLocalizer().getMessage(HistoryBuilder.class, "noSystemOrTypeHistoryForPartitionAwareServer");
throw new InvalidRequestException(msg);
}
}
}
}
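
HistoryBuilder assembles its count and page queries from optional predicates: partition scope, resource id or type, and an inclusive updated-date range. The following compilable sketch shows the same query shape with the standard JPA criteria API; HistoryRow and its field names are illustrative stand-ins for the real history entity.

import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.Id;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

// Illustrative history entity, standing in for the resource-version table.
@Entity
class HistoryRow {
	@Id
	Long id;
	Integer partitionId;
	Long resourceId;
	String resourceType;
	@Temporal(TemporalType.TIMESTAMP)
	Date updated;
}

class HistoryCountSketch {

	// Count history rows, applying only the predicates that were actually requested.
	long countHistory(EntityManager em, Integer partitionId, String resourceType, Long resourceId, Date start, Date end) {
		CriteriaBuilder cb = em.getCriteriaBuilder();
		CriteriaQuery<Long> query = cb.createQuery(Long.class);
		Root<HistoryRow> from = query.from(HistoryRow.class);
		query.select(cb.count(from));

		List<Predicate> predicates = new ArrayList<>();
		if (partitionId != null) {
			predicates.add(cb.equal(from.get("partitionId"), partitionId));
		}
		if (resourceId != null) {
			predicates.add(cb.equal(from.get("resourceId"), resourceId));
		} else if (resourceType != null) {
			predicates.add(cb.equal(from.get("resourceType"), resourceType));
		}
		if (start != null) {
			predicates.add(cb.greaterThanOrEqualTo(from.get("updated").as(Date.class), start));
		}
		if (end != null) {
			predicates.add(cb.lessThanOrEqualTo(from.get("updated").as(Date.class), end));
		}
		if (!predicates.isEmpty()) {
			query.where(predicates.toArray(new Predicate[0]));
		}

		TypedQuery<Long> typed = em.createQuery(query);
		return typed.getSingleResult();
	}
}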

View File

@ -0,0 +1,39 @@
package ca.uhn.fhir.jpa.dao;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2020 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.config.BaseConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import javax.annotation.Nullable;
import java.util.Date;
public class HistoryBuilderFactory {
@Autowired
private ApplicationContext myApplicationContext;
public HistoryBuilder newHistoryBuilder(@Nullable String theResourceType, @Nullable Long theResourceId, @Nullable Date theRangeStartInclusive, @Nullable Date theRangeEndInclusive) {
return (HistoryBuilder) myApplicationContext.getBean(BaseConfig.HISTORY_BUILDER, theResourceType, theResourceId, theRangeStartInclusive, theRangeEndInclusive);
}
}
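
HistoryBuilderFactory resolves a prototype-scoped bean by name and forwards the constructor arguments through ApplicationContext.getBean(name, args). A minimal sketch of that wiring, assuming a prototype @Bean definition like the one registered under BaseConfig.HISTORY_BUILDER; all names below are illustrative.

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Scope;

class ReportBuilder {
	private final String myResourceType;
	private final Long myResourceId;

	ReportBuilder(String theResourceType, Long theResourceId) {
		myResourceType = theResourceType;
		myResourceId = theResourceId;
	}
}

@Configuration
class BuilderConfig {
	static final String REPORT_BUILDER = "reportBuilder";

	// Prototype scope: every getBean() call builds a fresh instance from the supplied arguments,
	// and the returned bean still goes through the normal lifecycle (so @Autowired fields get injected).
	@Bean(name = REPORT_BUILDER)
	@Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
	ReportBuilder reportBuilder(String theResourceType, Long theResourceId) {
		return new ReportBuilder(theResourceType, theResourceId);
	}
}

class ReportBuilderFactory {
	@Autowired
	private ApplicationContext myApplicationContext;

	ReportBuilder newReportBuilder(String theResourceType, Long theResourceId) {
		// The trailing arguments are forwarded to the prototype @Bean factory method above.
		return (ReportBuilder) myApplicationContext.getBean(BuilderConfig.REPORT_BUILDER, theResourceType, theResourceId);
	}
}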

View File

@ -30,6 +30,7 @@ import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.DateRangeParam;
import org.hl7.fhir.instance.model.api.IBaseResource;
import javax.annotation.Nonnull;
import javax.persistence.EntityManager;
import java.util.Collection;
import java.util.Iterator;
@ -38,7 +39,7 @@ import java.util.Set;
public interface ISearchBuilder {
IResultIterator createQuery(SearchParameterMap theParams, SearchRuntimeDetails theSearchRuntime, RequestDetails theRequest, RequestPartitionId theRequestPartitionId);
IResultIterator createQuery(SearchParameterMap theParams, SearchRuntimeDetails theSearchRuntime, RequestDetails theRequest, @Nonnull RequestPartitionId theRequestPartitionId);
Iterator<Long> createCountQuery(SearchParameterMap theParams, String theSearchUuid, RequestDetails theRequest, RequestPartitionId theRequestPartitionId);

View File

@ -57,6 +57,7 @@ import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper;
import ca.uhn.fhir.jpa.util.BaseIterator;
import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.jpa.util.ScrollableResultsIterator;
import ca.uhn.fhir.jpa.util.SqlQueryList;
import ca.uhn.fhir.model.api.IQueryParameterType;
@ -89,8 +90,6 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import javax.annotation.Nonnull;
import javax.persistence.EntityManager;
@ -124,8 +123,6 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
* The SearchBuilder is responsible for actually forming the SQL query that handles
* searches for resources
*/
@Component
@Scope("prototype")
public class SearchBuilder implements ISearchBuilder {
/**
@ -179,7 +176,7 @@ public class SearchBuilder implements ISearchBuilder {
/**
* Constructor
*/
SearchBuilder(IDao theDao, String theResourceName, Class<? extends IBaseResource> theResourceType) {
public SearchBuilder(IDao theDao, String theResourceName, Class<? extends IBaseResource> theResourceType) {
myCallingDao = theDao;
myResourceName = theResourceName;
myResourceType = theResourceType;
@ -229,7 +226,9 @@ public class SearchBuilder implements ISearchBuilder {
}
@Override
public Iterator<Long> createCountQuery(SearchParameterMap theParams, String theSearchUuid, RequestDetails theRequest, RequestPartitionId theRequestPartitionId) {
public Iterator<Long> createCountQuery(SearchParameterMap theParams, String theSearchUuid, RequestDetails theRequest, @Nonnull RequestPartitionId theRequestPartitionId) {
assert theRequestPartitionId != null;
init(theParams, theSearchUuid, theRequestPartitionId);
TypedQuery<Long> query = createQuery(null, null, true, theRequest);
@ -245,7 +244,9 @@ public class SearchBuilder implements ISearchBuilder {
}
@Override
public IResultIterator createQuery(SearchParameterMap theParams, SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest, RequestPartitionId theRequestPartitionId) {
public IResultIterator createQuery(SearchParameterMap theParams, SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest, @Nonnull RequestPartitionId theRequestPartitionId) {
assert theRequestPartitionId != null;
init(theParams, theSearchRuntimeDetails.getSearchUuid(), theRequestPartitionId);
if (myPidSet == null) {
@ -386,7 +387,7 @@ public class SearchBuilder implements ISearchBuilder {
myQueryRoot.addPredicate(myCriteriaBuilder.equal(myQueryRoot.get("myResourceType"), myResourceName));
}
myQueryRoot.addPredicate(myCriteriaBuilder.isNull(myQueryRoot.get("myDeleted")));
if (myRequestPartitionId != null) {
if (!myRequestPartitionId.isAllPartitions()) {
if (myRequestPartitionId.getPartitionId() != null) {
myQueryRoot.addPredicate(myCriteriaBuilder.equal(myQueryRoot.get("myPartitionIdValue").as(Integer.class), myRequestPartitionId.getPartitionId()));
} else {
@ -685,19 +686,10 @@ public class SearchBuilder implements ISearchBuilder {
theResourceListToPopulate.add(null);
}
/*
* As always, Oracle can't handle things that other databases don't mind.. In this
* case it doesn't like more than ~1000 IDs in a single load, so we break this up
* if it's lots of IDs. I suppose maybe we should be doing this as a join anyhow
* but this should work too. Sigh.
*/
List<ResourcePersistentId> pids = new ArrayList<>(thePids);
for (int i = 0; i < pids.size(); i += MAXIMUM_PAGE_SIZE) {
int to = i + MAXIMUM_PAGE_SIZE;
to = Math.min(to, pids.size());
List<ResourcePersistentId> pidsSubList = pids.subList(i, to);
doLoadPids(pidsSubList, theIncludedPids, theResourceListToPopulate, theForHistoryOperation, position, theDetails);
}
new QueryChunker<ResourcePersistentId>().chunk(pids, t -> {
doLoadPids(t, theIncludedPids, theResourceListToPopulate, theForHistoryOperation, position, theDetails);
});
}
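
The hand-rolled loop that sliced the PID list into MAXIMUM_PAGE_SIZE pieces (to stay under Oracle's roughly 1000-element IN-clause limit) is replaced by a QueryChunker callback. Below is a standalone sketch of the same chunking idea in plain Java; the chunk size and names are illustrative, not the HAPI constants.

import java.util.List;
import java.util.function.Consumer;

final class ListChunker {

	// Stay comfortably under Oracle's ~1000-expression IN-clause limit.
	private static final int CHUNK_SIZE = 800;

	static <T> void chunk(List<T> theInput, Consumer<List<T>> theConsumer) {
		for (int i = 0; i < theInput.size(); i += CHUNK_SIZE) {
			int to = Math.min(i + CHUNK_SIZE, theInput.size());
			theConsumer.accept(theInput.subList(i, to));
		}
	}
}

// Usage, mirroring the QueryChunker call site above:
// ListChunker.chunk(pids, sublist -> loadPids(sublist));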
@ -966,7 +958,7 @@ public class SearchBuilder implements ISearchBuilder {
private void addPredicateCompositeStringUnique(@Nonnull SearchParameterMap theParams, String theIndexedString, RequestPartitionId theRequestPartitionId) {
Join<ResourceTable, ResourceIndexedCompositeStringUnique> join = myQueryRoot.join("myParamsCompositeStringUnique", JoinType.LEFT);
if (theRequestPartitionId != null) {
if (!theRequestPartitionId.isAllPartitions()) {
Integer partitionId = theRequestPartitionId.getPartitionId();
Predicate predicate = myCriteriaBuilder.equal(join.get("myPartitionIdValue").as(Integer.class), partitionId);
myQueryRoot.addPredicate(predicate);
@ -1347,7 +1339,7 @@ public class SearchBuilder implements ISearchBuilder {
return ResourcePersistentId.fromLongList(query.getResultList());
}
private static Predicate[] toPredicateArray(List<Predicate> thePredicates) {
static Predicate[] toPredicateArray(List<Predicate> thePredicates) {
return thePredicates.toArray(new Predicate[0]);
}
}

View File

@ -21,12 +21,18 @@ package ca.uhn.fhir.jpa.dao;
*/
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.config.BaseConfig;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Lookup;
import org.springframework.stereotype.Service;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
public class SearchBuilderFactory {
@Autowired
private ApplicationContext myApplicationContext;
public ISearchBuilder newSearchBuilder(IDao theDao, String theResourceName, Class<? extends IBaseResource> theResourceType) {
return (ISearchBuilder) myApplicationContext.getBean(BaseConfig.SEARCH_BUILDER, theDao, theResourceName, theResourceType);
}
@Service
public abstract class SearchBuilderFactory {
@Lookup
public abstract ISearchBuilder newSearchBuilder(IDao theDao, String theResourceName, Class<? extends IBaseResource> theResourceType);
}
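
SearchBuilderFactory switches from an explicit ApplicationContext lookup to Spring's @Lookup method injection: the container overrides the abstract method and routes each call to getBean for a prototype target, passing the method arguments along. A small sketch under that assumption; the bean and class names are illustrative.

import org.springframework.beans.factory.annotation.Lookup;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;

// Prototype target: a fresh instance per lookup call.
@Component
@Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
class QueryBuilder {
	private final String myResourceName;

	QueryBuilder(String theResourceName) {
		myResourceName = theResourceName;
	}
}

// Spring generates a subclass at runtime and implements the @Lookup method as a
// getBean() call, passing the method arguments through to the prototype's constructor.
@Service
abstract class QueryBuilderFactory {

	@Lookup
	public abstract QueryBuilder newQueryBuilder(String theResourceName);
}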

View File

@ -1,5 +1,11 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
@ -24,15 +30,11 @@ import java.util.Optional;
* #L%
*/
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
public interface IForcedIdDao extends JpaRepository<ForcedId, Long> {
@Query("SELECT f FROM ForcedId f WHERE myResourcePid IN (:resource_pids)")
List<ForcedId> findAllByResourcePid(@Param("resource_pids") List<Long> theResourcePids);
@Query("SELECT f.myResourcePid FROM ForcedId f WHERE myForcedId IN (:forced_id)")
List<Long> findByForcedId(@Param("forced_id") Collection<String> theForcedId);
@ -46,7 +48,7 @@ public interface IForcedIdDao extends JpaRepository<ForcedId, Long> {
Optional<Long> findByPartitionIdAndTypeAndForcedId(@Param("partition_id") Integer thePartitionId, @Param("resource_type") String theResourceType, @Param("forced_id") String theForcedId);
@Query("SELECT f FROM ForcedId f WHERE f.myResourcePid = :resource_pid")
ForcedId findByResourcePid(@Param("resource_pid") Long theResourcePid);
Optional<ForcedId> findByResourcePid(@Param("resource_pid") Long theResourcePid);
@Modifying
@Query("DELETE FROM ForcedId t WHERE t.myId = :pid")
@ -75,7 +77,7 @@ public interface IForcedIdDao extends JpaRepository<ForcedId, Long> {
/**
* Warning: No DB index exists for this particular query, so it may not perform well
*
* <p>
* This method returns a Collection where each row is an element in the collection. Each element in the collection
* is an object array, where the order matters (the array represents columns returned by the query). Be careful if you change this query in any way.
*/

View File

@ -6,13 +6,8 @@ import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.jpa.repository.Temporal;
import org.springframework.data.repository.query.Param;
import javax.persistence.TemporalType;
import java.util.Collection;
import java.util.Date;
/*
* #%L
* HAPI FHIR JPA Server
@ -35,36 +30,6 @@ import java.util.Date;
public interface IResourceHistoryTableDao extends JpaRepository<ResourceHistoryTable, Long> {
@Query("SELECT COUNT(*) FROM ResourceHistoryTable t WHERE t.myUpdated >= :cutoff")
int countForAllResourceTypes(
@Temporal(value = TemporalType.TIMESTAMP) @Param("cutoff") Date theCutoff
);
@Query("SELECT COUNT(*) FROM ResourceHistoryTable t")
int countForAllResourceTypes(
);
@Query("SELECT COUNT(*) FROM ResourceHistoryTable t WHERE t.myResourceId = :id AND t.myUpdated >= :cutoff")
int countForResourceInstance(
@Param("id") Long theId,
@Temporal(value = TemporalType.TIMESTAMP) @Param("cutoff") Date theCutoff
);
@Query("SELECT COUNT(*) FROM ResourceHistoryTable t WHERE t.myResourceId = :id")
int countForResourceInstance(
@Param("id") Long theId
);
@Query("SELECT COUNT(*) FROM ResourceHistoryTable t WHERE t.myResourceType = :type AND t.myUpdated >= :cutoff")
int countForResourceType(
@Param("type") String theType,
@Temporal(value = TemporalType.TIMESTAMP) @Param("cutoff") Date theCutoff
);
@Query("SELECT COUNT(*) FROM ResourceHistoryTable t WHERE t.myResourceType = :type")
int countForResourceType(
@Param("type") String theType
);
@Query("SELECT t FROM ResourceHistoryTable t LEFT OUTER JOIN FETCH t.myProvenance WHERE t.myResourceId = :id AND t.myResourceVersion = :version")
ResourceHistoryTable findForIdAndVersionAndFetchProvenance(@Param("id") long theId, @Param("version") long theVersion);

View File

@ -43,6 +43,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
@ -63,7 +64,7 @@ public class DaoResourceLinkResolver implements IResourceLinkResolver {
private DaoRegistry myDaoRegistry;
@Override
public IResourceLookup findTargetResource(RequestPartitionId theRequestPartitionId, RuntimeSearchParam theSearchParam, String theSourcePath, IIdType theSourceResourceId, String theResourceType, Class<? extends IBaseResource> theType, IBaseReference theReference, RequestDetails theRequest) {
public IResourceLookup findTargetResource(@Nonnull RequestPartitionId theRequestPartitionId, RuntimeSearchParam theSearchParam, String theSourcePath, IIdType theSourceResourceId, String theResourceType, Class<? extends IBaseResource> theType, IBaseReference theReference, RequestDetails theRequest) {
IResourceLookup resolvedResource;
String idPart = theSourceResourceId.getIdPart();
try {

View File

@ -21,7 +21,9 @@ package ca.uhn.fhir.jpa.dao.index;
*/
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndex;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
import ca.uhn.fhir.jpa.util.AddRemoveCount;
@ -41,18 +43,22 @@ public class DaoSearchParamSynchronizer {
protected EntityManager myEntityManager;
@Autowired
private DaoConfig myDaoConfig;
@Autowired
private PartitionSettings myPartitionSettings;
@Autowired
private ModelConfig myModelConfig;
public AddRemoveCount synchronizeSearchParamsToDatabase(ResourceIndexedSearchParams theParams, ResourceTable theEntity, ResourceIndexedSearchParams existingParams) {
AddRemoveCount retVal = new AddRemoveCount();
synchronize(theParams, theEntity, retVal, theParams.myStringParams, existingParams.myStringParams);
synchronize(theParams, theEntity, retVal, theParams.myTokenParams, existingParams.myTokenParams);
synchronize(theParams, theEntity, retVal, theParams.myNumberParams, existingParams.myNumberParams);
synchronize(theParams, theEntity, retVal, theParams.myQuantityParams, existingParams.myQuantityParams);
synchronize(theParams, theEntity, retVal, theParams.myDateParams, existingParams.myDateParams);
synchronize(theParams, theEntity, retVal, theParams.myUriParams, existingParams.myUriParams);
synchronize(theParams, theEntity, retVal, theParams.myCoordsParams, existingParams.myCoordsParams);
synchronize(theParams, theEntity, retVal, theParams.myLinks, existingParams.myLinks);
synchronize(theEntity, retVal, theParams.myStringParams, existingParams.myStringParams);
synchronize(theEntity, retVal, theParams.myTokenParams, existingParams.myTokenParams);
synchronize(theEntity, retVal, theParams.myNumberParams, existingParams.myNumberParams);
synchronize(theEntity, retVal, theParams.myQuantityParams, existingParams.myQuantityParams);
synchronize(theEntity, retVal, theParams.myDateParams, existingParams.myDateParams);
synchronize(theEntity, retVal, theParams.myUriParams, existingParams.myUriParams);
synchronize(theEntity, retVal, theParams.myCoordsParams, existingParams.myCoordsParams);
synchronize(theEntity, retVal, theParams.myLinks, existingParams.myLinks);
// make sure links are indexed
theEntity.setResourceLinks(theParams.myLinks);
@ -60,24 +66,26 @@ public class DaoSearchParamSynchronizer {
return retVal;
}
private <T extends BaseResourceIndex> void synchronize(ResourceIndexedSearchParams theParams, ResourceTable theEntity, AddRemoveCount theAddRemoveCount, Collection<T> theNewParms, Collection<T> theExistingParms) {
List<T> quantitiesToRemove = subtract(theExistingParms, theNewParms);
List<T> quantitiesToAdd = subtract(theNewParms, theExistingParms);
tryToReuseIndexEntities(quantitiesToRemove, quantitiesToAdd);
for (T next : quantitiesToRemove) {
private <T extends BaseResourceIndex> void synchronize(ResourceTable theEntity, AddRemoveCount theAddRemoveCount, Collection<T> theNewParams, Collection<T> theExistingParams) {
for (T next : theNewParams) {
next.setPartitionId(theEntity.getPartitionId());
next.calculateHashes();
}
List<T> paramsToRemove = subtract(theExistingParams, theNewParams);
List<T> paramsToAdd = subtract(theNewParams, theExistingParams);
tryToReuseIndexEntities(paramsToRemove, paramsToAdd);
for (T next : paramsToRemove) {
myEntityManager.remove(next);
theEntity.getParamsQuantity().remove(next);
}
for (T next : quantitiesToAdd) {
next.setPartitionId(theEntity.getPartitionId());
}
theParams.calculateHashes(theNewParms);
for (T next : quantitiesToAdd) {
for (T next : paramsToAdd) {
myEntityManager.merge(next);
}
theAddRemoveCount.addToAddCount(quantitiesToAdd.size());
theAddRemoveCount.addToRemoveCount(quantitiesToRemove.size());
theAddRemoveCount.addToAddCount(paramsToAdd.size());
theAddRemoveCount.addToRemoveCount(paramsToRemove.size());
}
/**
@ -106,6 +114,7 @@ public class DaoSearchParamSynchronizer {
// Take a row we were going to remove, and repurpose its ID
T entityToReuse = theIndexesToRemove.remove(theIndexesToRemove.size() - 1);
entityToReuse.copyMutableValuesFrom(targetEntity);
entityToReuse.calculateHashes();
theIndexesToAdd.set(addIndex, entityToReuse);
}
}
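
The reworked synchronize() computes the index rows to delete (existing minus new) and to insert (new minus existing), and tryToReuseIndexEntities then repurposes rows that were about to be deleted by copying the new values onto them, so their primary keys are reused instead of churning a DELETE plus an INSERT. A generic JDK-only sketch of that idea; IndexRow and its fields are illustrative.

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Objects;

// Illustrative index row: equality is by indexed content, while the database identity (id) can be reused.
class IndexRow {
	Long id;
	String value;

	IndexRow(Long theId, String theValue) {
		id = theId;
		value = theValue;
	}

	@Override
	public boolean equals(Object theOther) {
		return theOther instanceof IndexRow && Objects.equals(((IndexRow) theOther).value, value);
	}

	@Override
	public int hashCode() {
		return Objects.hashCode(value);
	}
}

class IndexSynchronizer {

	static <T> List<T> subtract(Collection<T> theFrom, Collection<T> theToSubtract) {
		List<T> retVal = new ArrayList<>(theFrom);
		retVal.removeAll(theToSubtract);
		return retVal;
	}

	void synchronize(Collection<IndexRow> theExisting, Collection<IndexRow> theNew) {
		List<IndexRow> toRemove = subtract(theExisting, theNew);
		List<IndexRow> toAdd = subtract(theNew, theExisting);

		// Reuse: copy the new content onto a row that was about to be deleted, keeping its id,
		// so one UPDATE replaces a DELETE plus an INSERT.
		while (!toRemove.isEmpty() && !toAdd.isEmpty()) {
			IndexRow reused = toRemove.remove(toRemove.size() - 1);
			IndexRow replacement = toAdd.remove(toAdd.size() - 1);
			reused.value = replacement.value;
			// persist(reused); remaining toRemove rows are deleted, remaining toAdd rows inserted
		}
	}
}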

View File

@ -30,6 +30,7 @@ import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.cross.ResourceLookup;
import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
@ -42,23 +43,22 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.IncorrectResultSizeDataAccessException;
import org.springframework.stereotype.Service;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
@ -84,7 +84,6 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
*/
@Service
public class IdHelperService {
private static final Logger ourLog = LoggerFactory.getLogger(IdHelperService.class);
@Autowired
protected IForcedIdDao myForcedIdDao;
@ -99,11 +98,13 @@ public class IdHelperService {
private Cache<String, Long> myPersistentIdCache;
private Cache<String, IResourceLookup> myResourceLookupCache;
private Cache<Long, Optional<String>> myForcedIdCache;
@PostConstruct
public void start() {
myPersistentIdCache = newCache();
myResourceLookupCache = newCache();
myForcedIdCache = newCache();
}
@ -118,7 +119,7 @@ public class IdHelperService {
* @throws ResourceNotFoundException If the ID can not be found
*/
@Nonnull
public IResourceLookup resolveResourceIdentity(RequestPartitionId theRequestPartitionId, String theResourceType, String theResourceId, RequestDetails theRequestDetails) throws ResourceNotFoundException {
public IResourceLookup resolveResourceIdentity(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theResourceId, RequestDetails theRequestDetails) throws ResourceNotFoundException {
// We only pass 1 input in so only 0..1 will come back
IdDt id = new IdDt(theResourceType, theResourceId);
Collection<IResourceLookup> matches = translateForcedIdToPids(theRequestPartitionId, theRequestDetails, Collections.singletonList(id));
@ -135,7 +136,7 @@ public class IdHelperService {
* @throws ResourceNotFoundException If the ID can not be found
*/
@Nonnull
public ResourcePersistentId resolveResourcePersistentIds(RequestPartitionId theRequestPartitionId, String theResourceType, String theId) {
public ResourcePersistentId resolveResourcePersistentIds(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId) {
Long retVal;
if (myDaoConfig.getResourceClientIdStrategy() == DaoConfig.ClientIdStrategyEnum.ANY || !isValidPid(theId)) {
if (myDaoConfig.isDeleteEnabled()) {
@ -159,7 +160,7 @@ public class IdHelperService {
* are deleted (but note that forced IDs can't change, so the cache can't return incorrect results)
*/
@Nonnull
public List<ResourcePersistentId> resolveResourcePersistentIdsWithCache(RequestPartitionId theRequestPartitionId, List<IIdType> theIds, RequestDetails theRequest) {
public List<ResourcePersistentId> resolveResourcePersistentIdsWithCache(RequestPartitionId theRequestPartitionId, List<IIdType> theIds) {
theIds.forEach(id -> Validate.isTrue(id.hasIdPart()));
if (theIds.isEmpty()) {
@ -202,14 +203,14 @@ public class IdHelperService {
if (nextIds.size() > 0) {
Collection<Object[]> views;
if (theRequestPartitionId != null) {
if (theRequestPartitionId.isAllPartitions()) {
views = myForcedIdDao.findByTypeAndForcedId(nextResourceType, nextIds);
} else {
if (theRequestPartitionId.getPartitionId() != null) {
views = myForcedIdDao.findByTypeAndForcedIdInPartition(nextResourceType, nextIds, theRequestPartitionId.getPartitionId());
} else {
views = myForcedIdDao.findByTypeAndForcedIdInPartitionNull(nextResourceType, nextIds);
}
} else {
views = myForcedIdDao.findByTypeAndForcedId(nextResourceType, nextIds);
}
for (Object[] nextView : views) {
String forcedId = (String) nextView[0];
@ -233,17 +234,20 @@ public class IdHelperService {
@Nonnull
public IIdType translatePidIdToForcedId(FhirContext theCtx, String theResourceType, ResourcePersistentId theId) {
IIdType retVal = theCtx.getVersion().newIdType();
retVal.setValue(translatePidIdToForcedId(theResourceType, theId));
Optional<String> forcedId = translatePidIdToForcedId(theId);
if (forcedId.isPresent()) {
retVal.setValue(theResourceType + '/' + forcedId.get());
} else {
retVal.setValue(theResourceType + '/' + theId.toString());
}
return retVal;
}
private String translatePidIdToForcedId(String theResourceType, ResourcePersistentId theId) {
ForcedId forcedId = myForcedIdDao.findByResourcePid(theId.getIdAsLong());
if (forcedId != null) {
return forcedId.getResourceType() + '/' + forcedId.getForcedId();
} else {
return theResourceType + '/' + theId.toString();
}
public Optional<String> translatePidIdToForcedId(ResourcePersistentId theId) {
return myForcedIdCache.get(theId.getIdAsLong(), pid -> myForcedIdDao.findByResourcePid(pid).map(t -> t.getForcedId()));
}
private ListMultimap<String, String> organizeIdsByResourceType(Collection<IIdType> theIds) {
@ -260,15 +264,9 @@ public class IdHelperService {
return typeToIds;
}
private Long resolveResourceIdentity(@Nullable RequestPartitionId theRequestPartitionId, @Nonnull String theResourceType, @Nonnull String theId) {
private Long resolveResourceIdentity(@Nonnull RequestPartitionId theRequestPartitionId, @Nonnull String theResourceType, @Nonnull String theId) {
Optional<Long> pid;
if (theRequestPartitionId != null) {
if (theRequestPartitionId.getPartitionId() == null) {
pid = myForcedIdDao.findByPartitionIdNullAndTypeAndForcedId(theResourceType, theId);
} else {
pid = myForcedIdDao.findByPartitionIdAndTypeAndForcedId(theRequestPartitionId.getPartitionId(), theResourceType, theId);
}
} else {
if (theRequestPartitionId.isAllPartitions()) {
try {
pid = myForcedIdDao.findByTypeAndForcedId(theResourceType, theId);
} catch (IncorrectResultSizeDataAccessException e) {
@ -280,6 +278,12 @@ public class IdHelperService {
String msg = myFhirCtx.getLocalizer().getMessage(IdHelperService.class, "nonUniqueForcedId");
throw new PreconditionFailedException(msg);
}
} else {
if (theRequestPartitionId.getPartitionId() == null) {
pid = myForcedIdDao.findByPartitionIdNullAndTypeAndForcedId(theResourceType, theId);
} else {
pid = myForcedIdDao.findByPartitionIdAndTypeAndForcedId(theRequestPartitionId.getPartitionId(), theResourceType, theId);
}
}
if (!pid.isPresent()) {
@ -288,7 +292,7 @@ public class IdHelperService {
return pid.get();
}
private Collection<IResourceLookup> translateForcedIdToPids(RequestPartitionId theRequestPartitionId, RequestDetails theRequest, Collection<IIdType> theId) {
private Collection<IResourceLookup> translateForcedIdToPids(@Nonnull RequestPartitionId theRequestPartitionId, RequestDetails theRequest, Collection<IIdType> theId) {
theId.forEach(id -> Validate.isTrue(id.hasIdPart()));
if (theId.isEmpty()) {
@ -329,14 +333,14 @@ public class IdHelperService {
Collection<Object[]> views;
assert isNotBlank(nextResourceType);
if (theRequestPartitionId != null) {
if (theRequestPartitionId.isAllPartitions()) {
views = myForcedIdDao.findAndResolveByForcedIdWithNoType(nextResourceType, nextIds);
} else {
if (theRequestPartitionId.getPartitionId() != null) {
views = myForcedIdDao.findAndResolveByForcedIdWithNoTypeInPartition(nextResourceType, nextIds, theRequestPartitionId.getPartitionId());
} else {
views = myForcedIdDao.findAndResolveByForcedIdWithNoTypeInPartitionNull(nextResourceType, nextIds);
}
} else {
views = myForcedIdDao.findAndResolveByForcedIdWithNoType(nextResourceType, nextIds);
}
for (Object[] next : views) {
@ -359,16 +363,16 @@ public class IdHelperService {
return retVal;
}
private void resolvePids(RequestPartitionId theRequestPartitionId, List<Long> thePidsToResolve, List<IResourceLookup> theTarget) {
private void resolvePids(@Nonnull RequestPartitionId theRequestPartitionId, List<Long> thePidsToResolve, List<IResourceLookup> theTarget) {
Collection<Object[]> lookup;
if (theRequestPartitionId != null) {
if (theRequestPartitionId.isAllPartitions()) {
lookup = myResourceTableDao.findLookupFieldsByResourcePid(thePidsToResolve);
} else {
if (theRequestPartitionId.getPartitionId() != null) {
lookup = myResourceTableDao.findLookupFieldsByResourcePidInPartition(thePidsToResolve, theRequestPartitionId.getPartitionId());
} else {
lookup = myResourceTableDao.findLookupFieldsByResourcePidInPartitionNull(thePidsToResolve);
}
} else {
lookup = myResourceTableDao.findLookupFieldsByResourcePid(thePidsToResolve);
}
lookup
.stream()
@ -379,6 +383,7 @@ public class IdHelperService {
public void clearCache() {
myPersistentIdCache.invalidateAll();
myResourceLookupCache.invalidateAll();
myForcedIdCache.invalidateAll();
}
private <T, V> @NonNull Cache<T, V> newCache() {
@ -389,6 +394,38 @@ public class IdHelperService {
.build();
}
public Map<Long, Optional<String>> translatePidsToForcedIds(Set<Long> thePids) {
Map<Long, Optional<String>> retVal = new HashMap<>(myForcedIdCache.getAllPresent(thePids));
List<Long> remainingPids = thePids
.stream()
.filter(t -> !retVal.containsKey(t))
.collect(Collectors.toList());
new QueryChunker<Long>().chunk(remainingPids, t -> {
List<ForcedId> forcedIds = myForcedIdDao.findAllByResourcePid(t);
for (ForcedId forcedId : forcedIds) {
Long nextResourcePid = forcedId.getResourceId();
Optional<String> nextForcedId = Optional.of(forcedId.getForcedId());
retVal.put(nextResourcePid, nextForcedId);
myForcedIdCache.put(nextResourcePid, nextForcedId);
}
});
remainingPids = thePids
.stream()
.filter(t -> !retVal.containsKey(t))
.collect(Collectors.toList());
for (Long nextResourcePid : remainingPids) {
retVal.put(nextResourcePid, Optional.empty());
myForcedIdCache.put(nextResourcePid, Optional.empty());
}
return retVal;
}
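
translatePidsToForcedIds answers what it can from the forced-id cache, batch-loads the misses in chunks, and finally caches Optional.empty() for PIDs that have no forced id so they are never queried again. A plain-Java sketch of that cache-then-backfill pattern; the Map-backed cache and the loader function are illustrative.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

class ForcedIdTranslator {

	private final Map<Long, Optional<String>> myCache = new ConcurrentHashMap<>();
	private final Function<List<Long>, Map<Long, String>> myBatchLoader; // e.g. one chunked SELECT against the forced-id table

	ForcedIdTranslator(Function<List<Long>, Map<Long, String>> theBatchLoader) {
		myBatchLoader = theBatchLoader;
	}

	Map<Long, Optional<String>> translate(Set<Long> thePids) {
		Map<Long, Optional<String>> retVal = new HashMap<>();

		// 1. Answer whatever is already cached.
		List<Long> misses = new ArrayList<>();
		for (Long pid : thePids) {
			Optional<String> cached = myCache.get(pid);
			if (cached != null) {
				retVal.put(pid, cached);
			} else {
				misses.add(pid);
			}
		}

		// 2. Batch-load the misses and cache the hits.
		for (Map.Entry<Long, String> next : myBatchLoader.apply(misses).entrySet()) {
			Optional<String> value = Optional.of(next.getValue());
			retVal.put(next.getKey(), value);
			myCache.put(next.getKey(), value);
		}

		// 3. Negative-cache PIDs with no forced id so they are not looked up again.
		for (Long pid : misses) {
			if (!retVal.containsKey(pid)) {
				retVal.put(pid, Optional.empty());
				myCache.put(pid, Optional.empty());
			}
		}
		return retVal;
	}
}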
public static boolean isValidPid(IIdType theId) {
if (theId == null) {
return false;

View File

@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.dao.index;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.MatchResourceUrlService;
@ -96,7 +97,14 @@ public class SearchParamWithInlineReferencesExtractor {
public void populateFromResource(ResourceIndexedSearchParams theParams, Date theUpdateTime, ResourceTable theEntity, IBaseResource theResource, ResourceIndexedSearchParams theExistingParams, RequestDetails theRequest) {
extractInlineReferences(theResource, theRequest);
mySearchParamExtractorService.extractFromResource(theEntity.getPartitionId(), theRequest, theParams, theEntity, theResource, theUpdateTime, true);
RequestPartitionId partitionId;
if (myPartitionSettings.isPartitioningEnabled()) {
partitionId = theEntity.getPartitionId();
} else {
partitionId = RequestPartitionId.allPartitions();
}
mySearchParamExtractorService.extractFromResource(partitionId, theRequest, theParams, theEntity, theResource, theUpdateTime, true);
Set<Map.Entry<String, RuntimeSearchParam>> activeSearchParams = mySearchParamRegistry.getActiveSearchParams(theEntity.getResourceType()).entrySet();
if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.ENABLED) {

View File

@ -131,7 +131,7 @@ abstract class BasePredicateBuilder {
}
void addPredicateParamMissingForNonReference(String theResourceName, String theParamName, boolean theMissing, Join<ResourceTable, ? extends BaseResourceIndexedSearchParam> theJoin, RequestPartitionId theRequestPartitionId) {
if (theRequestPartitionId != null) {
if (!theRequestPartitionId.isAllPartitions()) {
if (theRequestPartitionId.getPartitionId() != null) {
myQueryRoot.addPredicate(myCriteriaBuilder.equal(theJoin.get("myPartitionIdValue"), theRequestPartitionId.getPartitionId()));
} else {
@ -224,7 +224,7 @@ abstract class BasePredicateBuilder {
}
void addPartitionIdPredicate(RequestPartitionId theRequestPartitionId, From<?, ? extends BasePartitionable> theJoin, List<Predicate> theCodePredicates) {
if (theRequestPartitionId != null) {
if (!theRequestPartitionId.isAllPartitions()) {
Integer partitionId = theRequestPartitionId.getPartitionId();
Predicate partitionPredicate;
if (partitionId != null) {

View File

@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.dao.SearchBuilder;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
@ -150,67 +151,95 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
return p;
}
private boolean isNullOrDayPrecision(DateParam theDateParam) {
return theDateParam == null || theDateParam.getPrecision().ordinal() == TemporalPrecisionEnum.DAY.ordinal();
}
private Predicate createPredicateDateFromRange(CriteriaBuilder theBuilder,
From<?, ResourceIndexedSearchParamDate> theFrom,
DateRangeParam theRange,
SearchFilterParser.CompareOperation operation) {
Date lowerBound = theRange.getLowerBoundAsInstant();
Date upperBound = theRange.getUpperBoundAsInstant();
Predicate lt;
Predicate gt;
Date lowerBoundInstant = theRange.getLowerBoundAsInstant();
Date upperBoundInstant = theRange.getUpperBoundAsInstant();
DateParam lowerBound = theRange.getLowerBound();
DateParam upperBound = theRange.getUpperBound();
Integer lowerBoundAsOrdinal = theRange.getLowerBoundAsDateInteger();
Integer upperBoundAsOrdinal = theRange.getUpperBoundAsDateInteger();
Comparable genericLowerBound;
Comparable genericUpperBound;
/**
* If all present search parameters are of DAY precision, and {@link DaoConfig#getUseOrdinalDatesForDayPrecisionSearches()} is true,
* then we attempt to use the ordinal field for date comparisons instead of the date field.
*/
boolean isOrdinalComparison = isNullOrDayPrecision(lowerBound) && isNullOrDayPrecision(upperBound) && myDaoConfig.getModelConfig().getUseOrdinalDatesForDayPrecisionSearches();
Predicate lt = null;
Predicate gt = null;
Predicate lb = null;
Predicate ub = null;
String lowValueField;
String highValueField;
if (isOrdinalComparison) {
lowValueField = "myValueLowDateOrdinal";
highValueField = "myValueHighDateOrdinal";
genericLowerBound = lowerBoundAsOrdinal;
genericUpperBound = upperBoundAsOrdinal;
} else {
lowValueField = "myValueLow";
highValueField = "myValueHigh";
genericLowerBound = lowerBoundInstant;
genericUpperBound = upperBoundInstant;
}
if (operation == SearchFilterParser.CompareOperation.lt) {
if (lowerBound == null) {
if (lowerBoundInstant == null) {
throw new InvalidRequestException("lowerBound value not correctly specified for compare operation");
}
lb = theBuilder.lessThan(theFrom.get("myValueLow"), lowerBound);
// TODO: this should probably be assigned to ub rather than lb, since it represents the upper bound.
lb = theBuilder.lessThan(theFrom.get(lowValueField), genericLowerBound);
} else if (operation == SearchFilterParser.CompareOperation.le) {
if (upperBound == null) {
if (upperBoundInstant == null) {
throw new InvalidRequestException("upperBound value not correctly specified for compare operation");
}
lb = theBuilder.lessThanOrEqualTo(theFrom.get("myValueHigh"), upperBound);
// TODO: this should probably be assigned to ub rather than lb, since it represents the upper bound.
lb = theBuilder.lessThanOrEqualTo(theFrom.get(highValueField), genericUpperBound);
} else if (operation == SearchFilterParser.CompareOperation.gt) {
if (upperBound == null) {
if (upperBoundInstant == null) {
throw new InvalidRequestException("upperBound value not correctly specified for compare operation");
}
lb = theBuilder.greaterThan(theFrom.get("myValueHigh"), upperBound);
} else if (operation == SearchFilterParser.CompareOperation.ge) {
if (lowerBound == null) {
throw new InvalidRequestException("lowerBound value not correctly specified for compare operation");
}
lb = theBuilder.greaterThanOrEqualTo(theFrom.get("myValueLow"), lowerBound);
} else if (operation == SearchFilterParser.CompareOperation.ne) {
if ((lowerBound == null) ||
(upperBound == null)) {
lb = theBuilder.greaterThan(theFrom.get(highValueField), genericUpperBound);
} else if (operation == SearchFilterParser.CompareOperation.ge) {
if (lowerBoundInstant == null) {
throw new InvalidRequestException("lowerBound value not correctly specified for compare operation");
}
lb = theBuilder.greaterThanOrEqualTo(theFrom.get(lowValueField), genericLowerBound);
} else if (operation == SearchFilterParser.CompareOperation.ne) {
if ((lowerBoundInstant == null) ||
(upperBoundInstant == null)) {
throw new InvalidRequestException("lowerBound and/or upperBound value not correctly specified for compare operation");
}
/*Predicate*/
lt = theBuilder.lessThanOrEqualTo(theFrom.get("myValueLow"), lowerBound);
/*Predicate*/
gt = theBuilder.greaterThanOrEqualTo(theFrom.get("myValueHigh"), upperBound);
lt = theBuilder.lessThan(theFrom.get(lowValueField), genericLowerBound);
gt = theBuilder.greaterThan(theFrom.get(highValueField), genericUpperBound);
lb = theBuilder.or(lt,
gt);
} else if ((operation == SearchFilterParser.CompareOperation.eq) ||
(operation == null)) {
if (lowerBound != null) {
/*Predicate*/
gt = theBuilder.greaterThanOrEqualTo(theFrom.get("myValueLow"), lowerBound);
/*Predicate*/
lt = theBuilder.greaterThanOrEqualTo(theFrom.get("myValueHigh"), lowerBound);
if (theRange.getLowerBound().getPrefix() == ParamPrefixEnum.STARTS_AFTER || theRange.getLowerBound().getPrefix() == ParamPrefixEnum.EQUAL) {
} else if ((operation == SearchFilterParser.CompareOperation.eq) || (operation == null)) {
if (lowerBoundInstant != null) {
gt = theBuilder.greaterThanOrEqualTo(theFrom.get(lowValueField), genericLowerBound);
lt = theBuilder.greaterThanOrEqualTo(theFrom.get(highValueField), genericLowerBound);
if (lowerBound.getPrefix() == ParamPrefixEnum.STARTS_AFTER || lowerBound.getPrefix() == ParamPrefixEnum.EQUAL) {
lb = gt;
} else {
lb = theBuilder.or(gt, lt);
}
}
if (upperBound != null) {
/*Predicate*/
gt = theBuilder.lessThanOrEqualTo(theFrom.get("myValueLow"), upperBound);
/*Predicate*/
lt = theBuilder.lessThanOrEqualTo(theFrom.get("myValueHigh"), upperBound);
if (upperBoundInstant != null) {
gt = theBuilder.lessThanOrEqualTo(theFrom.get(lowValueField), genericUpperBound);
lt = theBuilder.lessThanOrEqualTo(theFrom.get(highValueField), genericUpperBound);
if (theRange.getUpperBound().getPrefix() == ParamPrefixEnum.ENDS_BEFORE || theRange.getUpperBound().getPrefix() == ParamPrefixEnum.EQUAL) {
ub = lt;
} else {
@ -221,8 +250,11 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
throw new InvalidRequestException(String.format("Unsupported operator specified, operator=%s",
operation.name()));
}
ourLog.trace("Date range is {} - {}", lowerBound, upperBound);
if (isOrdinalComparison) {
ourLog.trace("Ordinal date range is {} - {} ", lowerBoundAsOrdinal, upperBoundAsOrdinal);
} else {
ourLog.trace("Date range is {} - {}", lowerBoundInstant, upperBoundInstant);
}
if (lb != null && ub != null) {
return (theBuilder.and(lb, ub));
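
When both bounds are day precision and the UseOrdinalDatesForDayPrecisionSearches setting is on, the comparison above runs against integer yyyyMMdd ordinals (myValueLowDateOrdinal / myValueHighDateOrdinal) rather than timestamps. A small java.time sketch of the encoding and an inclusive range check; the helper names are illustrative.

import java.time.LocalDate;

final class OrdinalDates {

	// 2020-05-05 -> 20200505: a day-precision date collapses to a sortable integer.
	static int toOrdinal(LocalDate theDate) {
		return theDate.getYear() * 10000 + theDate.getMonthValue() * 100 + theDate.getDayOfMonth();
	}

	// Inclusive range check, mirroring greaterThanOrEqualTo/lessThanOrEqualTo on the ordinal columns.
	static boolean inRange(LocalDate theValue, LocalDate theLowerInclusive, LocalDate theUpperInclusive) {
		int ordinal = toOrdinal(theValue);
		return (theLowerInclusive == null || ordinal >= toOrdinal(theLowerInclusive))
			&& (theUpperInclusive == null || ordinal <= toOrdinal(theUpperInclusive));
	}
}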

View File

@ -61,7 +61,17 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.QualifiedParamList;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.rest.param.CompositeParam;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.HasParam;
import ca.uhn.fhir.rest.param.NumberParam;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.param.QuantityParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.SpecialParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.UriParam;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
@ -201,7 +211,7 @@ class PredicateBuilderReference extends BasePredicateBuilder {
}
// Resources by ID
List<ResourcePersistentId> targetPids = myIdHelperService.resolveResourcePersistentIdsWithCache(theRequestPartitionId, targetIds, theRequest);
List<ResourcePersistentId> targetPids = myIdHelperService.resolveResourcePersistentIdsWithCache(theRequestPartitionId, targetIds);
if (!targetPids.isEmpty()) {
ourLog.debug("Searching for resource link with target PIDs: {}", targetPids);
Predicate pathPredicate;
@ -565,7 +575,7 @@ class PredicateBuilderReference extends BasePredicateBuilder {
if (nextParamDef != null) {
if (myPartitionSettings.isPartitioningEnabled() && myPartitionSettings.isIncludePartitionInSearchHashes()) {
if (theRequestPartitionId == null) {
if (theRequestPartitionId.isAllPartitions()) {
throw new PreconditionFailedException("This server is not configured to support search against all partitions");
}
}

View File

@ -160,10 +160,6 @@ class PredicateBuilderTag extends BasePredicateBuilder {
continue;
} else {
myQueryRoot.setHasIndexJoins();
}
Join<ResourceTable, ResourceTag> tagJoin = myQueryRoot.join("myTags", JoinType.LEFT);

View File

@ -28,8 +28,10 @@ import ca.uhn.fhir.context.support.ValueSetExpansionOptions;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.dao.SearchBuilder;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.model.api.IQueryParameterType;
@ -39,6 +41,7 @@ import ca.uhn.fhir.rest.param.NumberParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.TokenParamModifier;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
import ca.uhn.fhir.util.VersionIndependentConcept;
import com.google.common.collect.Sets;
import org.hibernate.query.criteria.internal.CriteriaBuilderImpl;
@ -72,6 +75,8 @@ class PredicateBuilderToken extends BasePredicateBuilder implements IPredicateBu
private ITermReadSvc myTerminologySvc;
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
@Autowired
private ModelConfig myModelConfig;
PredicateBuilderToken(SearchBuilder theSearchBuilder, PredicateBuilder thePredicateBuilder) {
super(theSearchBuilder);
@ -99,6 +104,20 @@ class PredicateBuilderToken extends BasePredicateBuilder implements IPredicateBu
if (nextOr instanceof TokenParam) {
TokenParam id = (TokenParam) nextOr;
if (id.isText()) {
// Check whether the :text modifier is actually enabled here
RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName);
boolean tokenTextIndexingEnabled = BaseSearchParamExtractor.tokenTextIndexingEnabledForSearchParam(myModelConfig, param);
if (!tokenTextIndexingEnabled) {
String msg;
if (myModelConfig.isSuppressStringIndexingInTokens()) {
msg = myContext.getLocalizer().getMessage(PredicateBuilderToken.class, "textModifierDisabledForServer");
} else {
msg = myContext.getLocalizer().getMessage(PredicateBuilderToken.class, "textModifierDisabledForSearchParam");
}
throw new MethodNotAllowedException(msg);
}
myPredicateBuilder.addPredicateString(theResourceName, theParamName, theList, theRequestPartitionId);
break;
}
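For context, a client-side sketch of a search that exercises this :text code path follows (the base URL and search values are illustrative assumptions; when string indexing in tokens is suppressed, or :text is disabled for the search parameter, the server now answers with HTTP 405):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Bundle;

public class TokenTextSearchExample {
	public static void main(String[] args) {
		// Assumed base URL for the sketch
		IGenericClient client = FhirContext.forR4().newRestfulGenericClient("http://localhost:8080/fhir");

		// Token search using the :text modifier; with SuppressStringIndexingInTokens enabled
		// (or :text disabled for this parameter) this call fails with HTTP 405 Method Not Allowed
		Bundle results = client.search()
			.byUrl("Observation?code:text=blood")
			.returnBundle(Bundle.class)
			.execute();
	}
}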

View File

@ -20,6 +20,8 @@ package ca.uhn.fhir.jpa.entity;
* #L%
*/
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
@ -73,4 +75,7 @@ public class PartitionEntity {
myDescription = theDescription;
}
public RequestPartitionId toRequestPartitionId() {
return RequestPartitionId.fromPartitionIdAndName(getId(), getName());
}
}

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.entity;
*/
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
@ -48,6 +49,7 @@ import java.util.Date;
" h.res_updated as res_updated, " +
" h.res_text as res_text, " +
" h.res_encoding as res_encoding, " +
" h.PARTITION_ID as PARTITION_ID, " +
" p.SOURCE_URI as PROV_SOURCE_URI," +
" p.REQUEST_ID as PROV_REQUEST_ID," +
" f.forced_id as FORCED_PID " +
@ -94,6 +96,8 @@ public class ResourceSearchView implements IBaseResourceEntity, Serializable {
private ResourceEncodingEnum myEncoding;
@Column(name = "FORCED_PID", length = ForcedId.MAX_FORCED_ID_LENGTH)
private String myForcedPid;
@Column(name = "PARTITION_ID")
private Integer myPartitionId;
public ResourceSearchView() {
}
@ -187,6 +191,11 @@ public class ResourceSearchView implements IBaseResourceEntity, Serializable {
return myHasTags;
}
@Override
public RequestPartitionId getPartitionId() {
return RequestPartitionId.fromPartitionId(myPartitionId);
}
public byte[] getResource() {
return myResource;
}

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.partition;
*/
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
public interface IPartitionLookupSvc {
@ -30,11 +31,14 @@ public interface IPartitionLookupSvc {
void start();
/**
* @throws IllegalArgumentException If the name is not known
* @throws ResourceNotFoundException If the name is not known
*/
PartitionEntity getPartitionByName(String theName) throws IllegalArgumentException;
PartitionEntity getPartitionByName(String theName) throws ResourceNotFoundException;
PartitionEntity getPartitionById(Integer theId);
/**
* @throws ResourceNotFoundException If the ID is not known
*/
PartitionEntity getPartitionById(Integer theId) throws ResourceNotFoundException;
void clearCaches();

View File

@ -27,10 +27,10 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
public interface IRequestPartitionHelperService {
@Nullable
RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, String theResourceType);
public interface IRequestPartitionHelperSvc {
@Nonnull
RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, String theResourceType);
@Nullable
RequestPartitionId determineCreatePartitionForRequest(@Nullable RequestDetails theRequest, @Nonnull IBaseResource theResource);
@Nonnull
RequestPartitionId determineCreatePartitionForRequest(@Nullable RequestDetails theRequest, @Nonnull IBaseResource theResource, @Nonnull String theResourceType);
}

View File

@ -24,6 +24,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.data.IPartitionDao;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import com.github.benmanes.caffeine.cache.CacheLoader;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;
@ -97,9 +98,9 @@ public class PartitionLookupSvcImpl implements IPartitionLookupSvc {
}
@Override
public PartitionEntity getPartitionById(Integer theId) {
Validate.notNull(theId, "The ID must not be null");
return myIdToPartitionCache.get(theId);
public PartitionEntity getPartitionById(Integer thePartitionId) {
validatePartitionIdSupplied(myFhirCtx, thePartitionId);
return myIdToPartitionCache.get(thePartitionId);
}
@Override
@ -158,7 +159,7 @@ public class PartitionLookupSvcImpl implements IPartitionLookupSvc {
@Override
@Transactional
public void deletePartition(Integer thePartitionId) {
Validate.notNull(thePartitionId);
validatePartitionIdSupplied(myFhirCtx, thePartitionId);
if (DEFAULT_PERSISTED_PARTITION_ID == thePartitionId) {
String msg = myFhirCtx.getLocalizer().getMessageSanitized(PartitionLookupSvcImpl.class, "cantDeleteDefaultPartition");
@ -204,7 +205,7 @@ public class PartitionLookupSvcImpl implements IPartitionLookupSvc {
.findForName(theName)
.orElseThrow(() -> {
String msg = myFhirCtx.getLocalizer().getMessageSanitized(PartitionLookupSvcImpl.class, "invalidName", theName);
return new IllegalArgumentException(msg);
return new ResourceNotFoundException(msg);
}));
}
}
@ -217,8 +218,15 @@ public class PartitionLookupSvcImpl implements IPartitionLookupSvc {
.findById(theId)
.orElseThrow(() -> {
String msg = myFhirCtx.getLocalizer().getMessageSanitized(PartitionLookupSvcImpl.class, "unknownPartitionId", theId);
return new IllegalArgumentException(msg);
return new ResourceNotFoundException(msg);
}));
}
}
public static void validatePartitionIdSupplied(FhirContext theFhirContext, Integer thePartitionId) {
if (thePartitionId == null) {
String msg = theFhirContext.getLocalizer().getMessageSanitized(PartitionLookupSvcImpl.class, "noIdSupplied");
throw new InvalidRequestException(msg);
}
}
}

View File

@ -32,12 +32,14 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.jetbrains.annotations.NotNull;
import org.springframework.beans.factory.annotation.Autowired;
import static ca.uhn.fhir.jpa.partition.PartitionLookupSvcImpl.validatePartitionIdSupplied;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.hl7.fhir.instance.model.api.IPrimitiveType.toValueOrNull;
/**
* This HAPI FHIR Server Plain Provider class provides the following operations:
* <ul>
* <li><code>partition-management-add-partition</code></li>
* <li><code>partition-management-create-partition</code></li>
* <li><code>partition-management-update-partition</code></li>
* <li><code>partition-management-delete-partition</code></li>
* </ul>
@ -47,29 +49,52 @@ public class PartitionManagementProvider {
@Autowired
private FhirContext myCtx;
@Autowired
private IPartitionLookupSvc myPartitionConfigSvc;
private IPartitionLookupSvc myPartitionLookupSvc;
/**
* Add Partition:
* <code>
* $partition-management-add-partition
* $partition-management-create-partition
* </code>
*/
@Operation(name = ProviderConstants.PARTITION_MANAGEMENT_ADD_PARTITION)
@Operation(name = ProviderConstants.PARTITION_MANAGEMENT_CREATE_PARTITION)
public IBaseParameters addPartition(
@ResourceParam IBaseParameters theRequest,
@OperationParam(name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_ID, min = 1, max = 1, typeName = "integer") IPrimitiveType<Integer> thePartitionId,
@OperationParam(name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_NAME, min = 1, max = 1, typeName = "code") IPrimitiveType<String> thePartitionName,
@OperationParam(name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_DESC, min = 0, max = 1, typeName = "string") IPrimitiveType<String> thePartitionDescription
) {
validatePartitionIdSupplied(myCtx, toValueOrNull(thePartitionId));
PartitionEntity input = parseInput(thePartitionId, thePartitionName, thePartitionDescription);
PartitionEntity output = myPartitionConfigSvc.createPartition(input);
// Note: Input validation happens inside IPartitionLookupSvc
PartitionEntity output = myPartitionLookupSvc.createPartition(input);
IBaseParameters retVal = prepareOutput(output);
return retVal;
}
/**
* Read Partition:
* <code>
* $partition-management-read-partition
* </code>
*/
@Operation(name = ProviderConstants.PARTITION_MANAGEMENT_READ_PARTITION, idempotent = true)
public IBaseParameters readPartition(
@ResourceParam IBaseParameters theRequest,
@OperationParam(name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_ID, min = 1, max = 1, typeName = "integer") IPrimitiveType<Integer> thePartitionId
) {
validatePartitionIdSupplied(myCtx, toValueOrNull(thePartitionId));
// Note: Input validation happens inside IPartitionLookupSvc
PartitionEntity output = myPartitionLookupSvc.getPartitionById(thePartitionId.getValue());
return prepareOutput(output);
}
/**
* Update Partition:
* <code>
@ -83,9 +108,13 @@ public class PartitionManagementProvider {
@OperationParam(name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_NAME, min = 1, max = 1, typeName = "code") IPrimitiveType<String> thePartitionName,
@OperationParam(name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_DESC, min = 0, max = 1, typeName = "string") IPrimitiveType<String> thePartitionDescription
) {
validatePartitionIdSupplied(myCtx, toValueOrNull(thePartitionId));
PartitionEntity input = parseInput(thePartitionId, thePartitionName, thePartitionDescription);
PartitionEntity output = myPartitionConfigSvc.updatePartition(input);
// Note: Input validation happens inside IPartitionLookupSvc
PartitionEntity output = myPartitionLookupSvc.updatePartition(input);
IBaseParameters retVal = prepareOutput(output);
return retVal;
@ -102,8 +131,9 @@ public class PartitionManagementProvider {
@ResourceParam IBaseParameters theRequest,
@OperationParam(name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_ID, min = 1, max = 1, typeName = "integer") IPrimitiveType<Integer> thePartitionId
) {
myPartitionConfigSvc.deletePartition(thePartitionId.getValue());
validatePartitionIdSupplied(myCtx, toValueOrNull(thePartitionId));
myPartitionLookupSvc.deletePartition(thePartitionId.getValue());
IBaseParameters retVal = ParametersUtil.newInstance(myCtx);
ParametersUtil.addParameterToParametersString(myCtx, retVal, "message", "Success");
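A client-side sketch of invoking the renamed create operation (the demo class, base URL, partition values, and literal parameter names "id"/"name"/"description" are assumptions for illustration; the provider above resolves the names via ProviderConstants):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.IntegerType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;

public class CreatePartitionExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		// Assumed server base URL
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

		// Parameters-in: partition ID, name and optional description
		Parameters input = new Parameters();
		input.addParameter().setName("id").setValue(new IntegerType(123));
		input.addParameter().setName("name").setValue(new CodeType("TENANT_A"));
		input.addParameter().setName("description").setValue(new StringType("Tenant A"));

		Parameters output = client.operation()
			.onServer()
			.named("$partition-management-create-partition")
			.withParameters(input)
			.execute();
	}
}

The read, update and delete operations follow the same Parameters-in/Parameters-out pattern.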

View File

@ -24,7 +24,6 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
@ -41,14 +40,13 @@ import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.HashSet;
import static ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster.doCallHooks;
import static ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster.doCallHooksAndReturnObject;
public class RequestPartitionHelperService implements IRequestPartitionHelperService {
public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
private final HashSet<Object> myPartitioningBlacklist;
@Autowired
private DaoConfig myDaoConfig;
@Autowired
private IInterceptorBroadcaster myInterceptorBroadcaster;
@Autowired
@ -58,7 +56,7 @@ public class RequestPartitionHelperService implements IRequestPartitionHelperSer
@Autowired
private PartitionSettings myPartitionSettings;
public RequestPartitionHelperService() {
public RequestPartitionHelperSvc() {
myPartitioningBlacklist = new HashSet<>();
// Infrastructure
@ -78,37 +76,45 @@ public class RequestPartitionHelperService implements IRequestPartitionHelperSer
/**
* Invoke the <code>STORAGE_PARTITION_IDENTIFY_READ</code> interceptor pointcut to determine the tenant for a read request
*/
@Nullable
@Nonnull
@Override
public RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, String theResourceType) {
if (myPartitioningBlacklist.contains(theResourceType)) {
return null;
}
RequestPartitionId requestPartitionId = null;
RequestPartitionId requestPartitionId;
if (myPartitionSettings.isPartitioningEnabled()) {
// Handle system requests
if (theRequest == null && myPartitioningBlacklist.contains(theResourceType)) {
return RequestPartitionId.defaultPartition();
}
// Interceptor call: STORAGE_PARTITION_IDENTIFY_READ
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest);
requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_IDENTIFY_READ, params);
validatePartition(requestPartitionId, theResourceType);
validatePartition(requestPartitionId, theResourceType, Pointcut.STORAGE_PARTITION_IDENTIFY_READ);
return normalizeAndNotifyHooks(requestPartitionId, theRequest);
}
return normalize(requestPartitionId);
return RequestPartitionId.allPartitions();
}
/**
* Invoke the <code>STORAGE_PARTITION_IDENTIFY_CREATE</code> interceptor pointcut to determine the tenant for a create request
*/
@Nullable
@Nonnull
@Override
public RequestPartitionId determineCreatePartitionForRequest(@Nullable RequestDetails theRequest, @Nonnull IBaseResource theResource) {
public RequestPartitionId determineCreatePartitionForRequest(@Nullable RequestDetails theRequest, @Nonnull IBaseResource theResource, @Nonnull String theResourceType) {
RequestPartitionId requestPartitionId;
RequestPartitionId requestPartitionId = null;
if (myPartitionSettings.isPartitioningEnabled()) {
// Handle system requests
if (theRequest == null && myPartitioningBlacklist.contains(theResourceType)) {
return RequestPartitionId.defaultPartition();
}
// Interceptor call: STORAGE_PARTITION_IDENTIFY_CREATE
HookParams params = new HookParams()
.add(IBaseResource.class, theResource)
@ -117,62 +123,70 @@ public class RequestPartitionHelperService implements IRequestPartitionHelperSer
requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE, params);
String resourceName = myFhirContext.getResourceDefinition(theResource).getName();
validatePartition(requestPartitionId, resourceName);
validatePartition(requestPartitionId, resourceName, Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE);
return normalizeAndNotifyHooks(requestPartitionId, theRequest);
}
return normalize(requestPartitionId);
return RequestPartitionId.allPartitions();
}
/**
* If the partition only has a name but not an ID, this method resolves the ID
* @param theRequestPartitionId the partition supplied by the interceptor (may carry only a name, or only an ID)
* @return the partition with the missing ID or name filled in where possible
*/
private RequestPartitionId normalize(RequestPartitionId theRequestPartitionId) {
if (theRequestPartitionId != null) {
if (theRequestPartitionId.getPartitionName() != null) {
@Nonnull
private RequestPartitionId normalizeAndNotifyHooks(@Nonnull RequestPartitionId theRequestPartitionId, RequestDetails theRequest) {
RequestPartitionId retVal = theRequestPartitionId;
PartitionEntity partition;
try {
partition = myPartitionConfigSvc.getPartitionByName(theRequestPartitionId.getPartitionName());
} catch (IllegalArgumentException e) {
String msg = myFhirContext.getLocalizer().getMessage(RequestPartitionHelperService.class, "unknownPartitionName", theRequestPartitionId.getPartitionName());
throw new ResourceNotFoundException(msg);
}
if (retVal.getPartitionName() != null) {
if (theRequestPartitionId.getPartitionId() != null) {
Validate.isTrue(theRequestPartitionId.getPartitionId().equals(partition.getId()), "Partition name %s does not match ID %n", theRequestPartitionId.getPartitionName(), theRequestPartitionId.getPartitionId());
return theRequestPartitionId;
} else {
return RequestPartitionId.forPartitionNameAndId(theRequestPartitionId.getPartitionName(), partition.getId(), theRequestPartitionId.getPartitionDate());
}
PartitionEntity partition;
try {
partition = myPartitionConfigSvc.getPartitionByName(retVal.getPartitionName());
} catch (IllegalArgumentException e) {
String msg = myFhirContext.getLocalizer().getMessage(RequestPartitionHelperSvc.class, "unknownPartitionName", retVal.getPartitionName());
throw new ResourceNotFoundException(msg);
}
if (theRequestPartitionId.getPartitionId() != null) {
PartitionEntity partition;
try {
partition = myPartitionConfigSvc.getPartitionById(theRequestPartitionId.getPartitionId());
} catch (IllegalArgumentException e) {
String msg = myFhirContext.getLocalizer().getMessage(RequestPartitionHelperService.class, "unknownPartitionId", theRequestPartitionId.getPartitionId());
throw new ResourceNotFoundException(msg);
}
return RequestPartitionId.forPartitionNameAndId(partition.getName(), partition.getId(), theRequestPartitionId.getPartitionDate());
if (retVal.getPartitionId() != null) {
Validate.isTrue(retVal.getPartitionId().equals(partition.getId()), "Partition name %s does not match ID %s", retVal.getPartitionName(), retVal.getPartitionId());
} else {
retVal = RequestPartitionId.forPartitionIdAndName(partition.getId(), retVal.getPartitionName(), retVal.getPartitionDate());
}
} else if (retVal.getPartitionId() != null) {
PartitionEntity partition;
try {
partition = myPartitionConfigSvc.getPartitionById(retVal.getPartitionId());
} catch (IllegalArgumentException e) {
String msg = myFhirContext.getLocalizer().getMessage(RequestPartitionHelperSvc.class, "unknownPartitionId", retVal.getPartitionId());
throw new ResourceNotFoundException(msg);
}
retVal = RequestPartitionId.forPartitionIdAndName(partition.getId(), partition.getName(), retVal.getPartitionDate());
}
// It's still possible that the partition only has a date but no name/id,
// or it could just be null
return theRequestPartitionId;
// Note: It's still possible that the partition only has a date but no name/id
HookParams params = new HookParams()
.add(RequestPartitionId.class, retVal)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest);
doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_SELECTED, params);
return retVal;
}
private void validatePartition(@Nullable RequestPartitionId theRequestPartitionId, @Nonnull String theResourceName) {
if (theRequestPartitionId != null && theRequestPartitionId.getPartitionId() != null) {
private void validatePartition(@Nullable RequestPartitionId theRequestPartitionId, @Nonnull String theResourceName, Pointcut thePointcut) {
Validate.notNull(theRequestPartitionId, "Interceptor did not provide a value for pointcut: %s", thePointcut);
if (theRequestPartitionId.getPartitionId() != null) {
// Make sure we're not using one of the conformance resources in a non-default partition
if (myPartitioningBlacklist.contains(theResourceName)) {
String msg = myFhirContext.getLocalizer().getMessageSanitized(RequestPartitionHelperService.class, "blacklistedResourceTypeForPartitioning", theResourceName);
String msg = myFhirContext.getLocalizer().getMessageSanitized(RequestPartitionHelperSvc.class, "blacklistedResourceTypeForPartitioning", theResourceName);
throw new UnprocessableEntityException(msg);
}
@ -180,7 +194,7 @@ public class RequestPartitionHelperService implements IRequestPartitionHelperSer
try {
myPartitionConfigSvc.getPartitionById(theRequestPartitionId.getPartitionId());
} catch (IllegalArgumentException e) {
String msg = myFhirContext.getLocalizer().getMessageSanitized(RequestPartitionHelperService.class, "unknownPartitionId", theRequestPartitionId.getPartitionId());
String msg = myFhirContext.getLocalizer().getMessageSanitized(RequestPartitionHelperSvc.class, "unknownPartitionId", theRequestPartitionId.getPartitionId());
throw new InvalidRequestException(msg);
}
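As a rough sketch of the interceptor contract consumed above, a deployment might register something like the following for the STORAGE_PARTITION_IDENTIFY_READ pointcut (the class name, header name, and selection rule are assumptions, not part of this changeset):

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.rest.api.server.RequestDetails;

@Interceptor
public class PartitionSelectionInterceptor {

	@Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_READ)
	public RequestPartitionId identifyPartitionForRead(RequestDetails theRequestDetails) {
		// Hypothetical rule: a custom header selects the partition, otherwise read from the default partition
		String partitionId = theRequestDetails.getHeader("X-Partition-Id");
		if (partitionId == null) {
			return RequestPartitionId.defaultPartition();
		}
		return RequestPartitionId.fromPartitionId(Integer.parseInt(partitionId));
	}
}

A corresponding STORAGE_PARTITION_IDENTIFY_CREATE hook is needed for writes; if a hook returns null, validatePartition above now rejects the request outright.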

View File

@ -24,9 +24,12 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.dao.HistoryBuilder;
import ca.uhn.fhir.jpa.dao.HistoryBuilderFactory;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.entity.Search;
@ -34,6 +37,7 @@ import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
import ca.uhn.fhir.jpa.util.InterceptorUtil;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
@ -47,6 +51,7 @@ import ca.uhn.fhir.rest.api.server.SimplePreResourceShowDetails;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import com.google.common.annotations.VisibleForTesting;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -59,14 +64,8 @@ import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.Nonnull;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
@ -87,6 +86,8 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
@Autowired
private SearchBuilderFactory mySearchBuilderFactory;
@Autowired
private HistoryBuilderFactory myHistoryBuilderFactory;
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
protected PlatformTransactionManager myTxManager;
@ -96,6 +97,8 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
private ISearchCoordinatorSvc mySearchCoordinatorSvc;
@Autowired
private ISearchCacheSvc mySearchCacheSvc;
@Autowired
private RequestPartitionHelperSvc myRequestPartitionHelperSvc;
/*
* Non autowired fields (will be different for every instance
@ -106,6 +109,7 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
private Search mySearchEntity;
private String myUuid;
private boolean myCacheHit;
private RequestPartitionId myRequestPartitionId;
/**
* Constructor
@ -115,6 +119,14 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
myUuid = theSearchUuid;
}
/**
* Constructor
*/
public PersistedJpaBundleProvider(RequestDetails theRequest, Search theSearch) {
myRequest = theRequest;
mySearchEntity = theSearch;
}
/**
* When HAPI FHIR server is running "for real", a new
* instance of the bundle provider is created to serve
@ -127,45 +139,17 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
mySearchEntity = null;
}
/**
* Perform a history search
*/
private List<IBaseResource> doHistoryInTransaction(int theFromIndex, int theToIndex) {
List<ResourceHistoryTable> results;
CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
CriteriaQuery<ResourceHistoryTable> q = cb.createQuery(ResourceHistoryTable.class);
Root<ResourceHistoryTable> from = q.from(ResourceHistoryTable.class);
List<Predicate> predicates = new ArrayList<>();
HistoryBuilder historyBuilder = myHistoryBuilderFactory.newHistoryBuilder(mySearchEntity.getResourceType(), mySearchEntity.getResourceId(), mySearchEntity.getLastUpdatedLow(), mySearchEntity.getLastUpdatedHigh());
if (mySearchEntity.getResourceType() == null) {
// All resource types
} else if (mySearchEntity.getResourceId() == null) {
predicates.add(cb.equal(from.get("myResourceType"), mySearchEntity.getResourceType()));
} else {
predicates.add(cb.equal(from.get("myResourceId"), mySearchEntity.getResourceId()));
}
RequestPartitionId partitionId = getRequestPartitionId();
List<ResourceHistoryTable> results = historyBuilder.fetchEntities(partitionId, theFromIndex, theToIndex);
if (mySearchEntity.getLastUpdatedLow() != null) {
predicates.add(cb.greaterThanOrEqualTo(from.get("myUpdated").as(Date.class), mySearchEntity.getLastUpdatedLow()));
}
if (mySearchEntity.getLastUpdatedHigh() != null) {
predicates.add(cb.lessThanOrEqualTo(from.get("myUpdated").as(Date.class), mySearchEntity.getLastUpdatedHigh()));
}
if (predicates.size() > 0) {
q.where(predicates.toArray(new Predicate[0]));
}
q.orderBy(cb.desc(from.get("myUpdated")));
TypedQuery<ResourceHistoryTable> query = myEntityManager.createQuery(q);
if (theToIndex - theFromIndex > 0) {
query.setFirstResult(theFromIndex);
query.setMaxResults(theToIndex - theFromIndex);
}
results = query.getResultList();
ArrayList<IBaseResource> retVal = new ArrayList<>();
List<IBaseResource> retVal = new ArrayList<>();
for (ResourceHistoryTable next : results) {
BaseHasResource resource;
resource = next;
@ -199,12 +183,26 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
.add(RequestDetails.class, myRequest)
.addIfMatchesType(ServletRequestDetails.class, myRequest);
JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, myRequest, Pointcut.STORAGE_PRESHOW_RESOURCES, params);
retVal = showDetails.toList();
}
return retVal;
}
@Nonnull
private RequestPartitionId getRequestPartitionId() {
if (myRequestPartitionId == null) {
if (mySearchEntity.getResourceId() != null) {
// If we have an ID, we've already checked the partition and made sure it's appropriate
myRequestPartitionId = RequestPartitionId.allPartitions();
} else {
myRequestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(myRequest, mySearchEntity.getResourceType());
}
}
return myRequestPartitionId;
}
protected List<IBaseResource> doSearchOrEverything(final int theFromIndex, final int theToIndex) {
if (mySearchEntity.getTotalCount() != null && mySearchEntity.getNumFound() <= 0) {
// No resources to fetch (e.g. we did a _summary=count search)
@ -239,6 +237,17 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
return true;
}
if (mySearchEntity.getSearchType() == SearchTypeEnum.HISTORY) {
if (mySearchEntity.getTotalCount() == null) {
new TransactionTemplate(myTxManager).executeWithoutResult(t -> {
HistoryBuilder historyBuilder = myHistoryBuilderFactory.newHistoryBuilder(mySearchEntity.getResourceType(), mySearchEntity.getResourceId(), mySearchEntity.getLastUpdatedLow(), mySearchEntity.getLastUpdatedHigh());
Long count = historyBuilder.fetchCount(getRequestPartitionId());
mySearchEntity.setTotalCount(count.intValue());
});
}
}
return true;
}
@ -361,7 +370,7 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
List<IBaseResource> resources = new ArrayList<>();
theSearchBuilder.loadResourcesByPid(thePids, includedPidList, resources, false, myRequest);
InterceptorUtil.fireStoragePreshowResource(resources, myRequest, myInterceptorBroadcaster);
resources = InterceptorUtil.fireStoragePreshowResource(resources, myRequest, myInterceptorBroadcaster);
return resources;
}

View File

@ -37,6 +37,11 @@ public class PersistedJpaBundleProviderFactory {
return (PersistedJpaBundleProvider) retVal;
}
public PersistedJpaBundleProvider newInstance(RequestDetails theRequest, Search theSearch) {
Object retVal = myApplicationContext.getBean(BaseConfig.PERSISTED_JPA_BUNDLE_PROVIDER_BY_SEARCH, theRequest, theSearch);
return (PersistedJpaBundleProvider) retVal;
}
public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage(RequestDetails theRequestDetails, Search theSearch, SearchCoordinatorSvcImpl.SearchTask theTask, ISearchBuilder theSearchBuilder) {
return (PersistedJpaSearchFirstPageBundleProvider) myApplicationContext.getBean(BaseConfig.PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER, theRequestDetails, theSearch, theTask, theSearchBuilder);
}

View File

@ -33,7 +33,7 @@ import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperService;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchInclude;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
@ -156,7 +156,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
@Autowired
private PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory;
@Autowired
private IRequestPartitionHelperService myRequestPartitionHelperService;
private IRequestPartitionHelperSvc myRequestPartitionHelperService;
/**
* Constructor
@ -514,7 +514,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
theSb.loadResourcesByPid(pids, includedPidsList, resources, false, theRequestDetails);
// Hook: STORAGE_PRESHOW_RESOURCES
InterceptorUtil.fireStoragePreshowResource(resources, theRequestDetails, myInterceptorBroadcaster);
resources = InterceptorUtil.fireStoragePreshowResource(resources, theRequestDetails, myInterceptorBroadcaster);
return new SimpleBundleProvider(resources);
});
@ -594,7 +594,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
}
@VisibleForTesting
public void setRequestPartitionHelperService(IRequestPartitionHelperService theRequestPartitionHelperService) {
public void setRequestPartitionHelperService(IRequestPartitionHelperSvc theRequestPartitionHelperService) {
myRequestPartitionHelperService = theRequestPartitionHelperService;
}

View File

@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.dao.IHapiJpaRepository;
@ -117,7 +118,7 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
@Override
public ResourcePersistentId getValueSetResourcePid(IIdType theIdType) {
return myIdHelperService.resolveResourcePersistentIds(null, theIdType.getResourceType(), theIdType.getIdPart());
return myIdHelperService.resolveResourcePersistentIds(RequestPartitionId.allPartitions(), theIdType.getResourceType(), theIdType.getIdPart());
}
@Transactional
@ -291,7 +292,7 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
Validate.notBlank(theCodeSystemResource.getUrl(), "theCodeSystemResource must have a URL");
IIdType csId = myTerminologyVersionAdapterSvc.createOrUpdateCodeSystem(theCodeSystemResource);
ResourcePersistentId codeSystemResourcePid = myIdHelperService.resolveResourcePersistentIds(null, csId.getResourceType(), csId.getIdPart());
ResourcePersistentId codeSystemResourcePid = myIdHelperService.resolveResourcePersistentIds(RequestPartitionId.allPartitions(), csId.getResourceType(), csId.getIdPart());
ResourceTable resource = myResourceTableDao.getOne(codeSystemResourcePid.getIdAsLong());
ourLog.info("CodeSystem resource has ID: {}", csId.getValue());
@ -551,7 +552,7 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
}
private ResourcePersistentId getCodeSystemResourcePid(IIdType theIdType) {
return myIdHelperService.resolveResourcePersistentIds(null, theIdType.getResourceType(), theIdType.getIdPart());
return myIdHelperService.resolveResourcePersistentIds(RequestPartitionId.allPartitions(), theIdType.getResourceType(), theIdType.getIdPart());
}
private void persistChildren(TermConcept theConcept, TermCodeSystemVersion theCodeSystem, IdentityHashMap<TermConcept, Object> theConceptsStack, int theTotalConcepts) {

View File

@ -36,24 +36,28 @@ public class InterceptorUtil {
/**
* Fires the {@link Pointcut#STORAGE_PRESHOW_RESOURCES} interceptor hook, which may remove resources
* from the resource list before they are returned
* @return the (potentially filtered) list of resources to show
*/
public static void fireStoragePreshowResource(List<IBaseResource> theResources, RequestDetails theRequest, IInterceptorBroadcaster theInterceptorBroadcaster) {
theResources.removeIf(t -> t == null);
public static List<IBaseResource> fireStoragePreshowResource(List<IBaseResource> theResources, RequestDetails theRequest, IInterceptorBroadcaster theInterceptorBroadcaster) {
List<IBaseResource> retVal = theResources;
retVal.removeIf(t -> t == null);
// Interceptor call: STORAGE_PRESHOW_RESOURCE
// This can be used to remove results from the search result details before
// the user has a chance to know that they were in the results
if (theResources.size() > 0) {
SimplePreResourceShowDetails accessDetails = new SimplePreResourceShowDetails(theResources);
if (retVal.size() > 0) {
SimplePreResourceShowDetails accessDetails = new SimplePreResourceShowDetails(retVal);
HookParams params = new HookParams()
.add(IPreResourceShowDetails.class, accessDetails)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest);
JpaInterceptorBroadcaster.doCallHooks(theInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESHOW_RESOURCES, params);
theResources.removeIf(t -> t == null);
retVal = accessDetails.toList();
retVal.removeIf(t -> t == null);
}
return retVal;
}
}
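A sketch of an interceptor observing this pointcut (the class name and the logging are illustrative assumptions; real hooks can also filter or mask entries via the show details before the client ever sees them):

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;

@Interceptor
public class PreShowAuditInterceptor {

	@Hook(Pointcut.STORAGE_PRESHOW_RESOURCES)
	public void preShowResources(IPreResourceShowDetails theDetails, RequestDetails theRequestDetails) {
		// Purely observational sketch: report how many resources are about to be shown
		System.out.println("About to show " + theDetails.size() + " resource(s)");
	}
}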

View File

@ -0,0 +1,45 @@
package ca.uhn.fhir.jpa.util;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2020 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.dao.SearchBuilder;
import java.util.List;
import java.util.function.Consumer;
/**
* As always, Oracle can't handle things that other databases don't mind. In this
* case it doesn't like more than ~1000 IDs in a single load, so we break the list up
* into chunks when it contains lots of IDs. Arguably we should be doing this as a join
* instead, but this works too. Sigh.
*/
public class QueryChunker<T> {
public void chunk(List<T> theInput, Consumer<List<T>> theBatchConsumer) {
for (int i = 0; i < theInput.size(); i += SearchBuilder.MAXIMUM_PAGE_SIZE) {
int to = i + SearchBuilder.MAXIMUM_PAGE_SIZE;
to = Math.min(to, theInput.size());
List<T> batch = theInput.subList(i, to);
theBatchConsumer.accept(batch);
}
}
}
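A minimal usage sketch for the new helper (the demo class and the printed message are assumptions):

import ca.uhn.fhir.jpa.util.QueryChunker;
import java.util.ArrayList;
import java.util.List;

public class QueryChunkerExample {
	public static void main(String[] args) {
		// A PID list well past the ~1000-element limit described above
		List<Long> pids = new ArrayList<>();
		for (long i = 0; i < 2500; i++) {
			pids.add(i);
		}
		// Each batch contains at most SearchBuilder.MAXIMUM_PAGE_SIZE entries,
		// so the resulting IN clauses stay Oracle-friendly
		new QueryChunker<Long>().chunk(pids, batch -> System.out.println("Would load " + batch.size() + " PIDs"));
	}
}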

View File

@ -6,6 +6,7 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.executor.InterceptorService;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
import ca.uhn.fhir.test.BaseTest;
import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc;
@ -111,6 +112,8 @@ public abstract class BaseJpaTest extends BaseTest {
protected ISearchCacheSvc mySearchCacheSvc;
@Autowired
protected IPartitionLookupSvc myPartitionConfigSvc;
@Autowired
private IdHelperService myIdHelperService;
@After
public void afterPerformCleanup() {
@ -121,6 +124,10 @@ public abstract class BaseJpaTest extends BaseTest {
if (myPartitionConfigSvc != null) {
myPartitionConfigSvc.clearCaches();
}
if (myIdHelperService != null) {
myIdHelperService.clearCache();
}
}
@After

View File

@ -46,6 +46,22 @@ public class TolerantJsonParserR4Test {
assertEquals("0.5", obs.getValueQuantity().getValueElement().getValueAsString());
}
@Test
public void testParseInvalidNumeric_DoubleZeros() {
String input = "{\n" +
"\"resourceType\": \"Observation\",\n" +
"\"valueQuantity\": {\n" +
" \"value\": 00\n" +
" }\n" +
"}";
TolerantJsonParser parser = new TolerantJsonParser(myFhirContext, new LenientErrorHandler());
Observation obs = parser.parseResource(Observation.class, input);
assertEquals("0", obs.getValueQuantity().getValueElement().getValueAsString());
}
@Test
public void testParseInvalidNumeric2() {
String input = "{\n" +

View File

@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.dao.dstu2;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
import ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoDstu3Test;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
@ -29,10 +30,12 @@ import org.hamcrest.core.StringContains;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.*;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.hamcrest.Matchers.*;
@ -1051,6 +1054,9 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
assertGone(org2Id);
}
@Autowired
private IForcedIdDao myForcedIdDao;
@Test
public void testHistoryByForcedId() {
IIdType idv1;
@ -1067,6 +1073,10 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
idv2 = myPatientDao.update(patient, mySrd).getId();
}
runInTransaction(()->{
ourLog.info("Forced IDs:\n{}", myForcedIdDao.findAll().stream().map(t->t.toString()).collect(Collectors.joining("\n")));
});
List<Patient> patients = toList(myPatientDao.history(idv1.toVersionless(), null, null, mySrd));
assertTrue(patients.size() == 2);
// Newest first
@ -1111,7 +1121,7 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
for (int i = 0; i < fullSize; i++) {
String expected = id.withVersion(Integer.toString(fullSize + 1 - i)).getValue();
String actual = history.getResources(i, i + 1).get(0).getIdElement().getValue();
assertEquals(expected, actual);
assertEquals("Failure at " + i, expected, actual);
}
// By type

View File

@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.interceptor.api.IInterceptorService;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
@ -12,14 +13,40 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoStructureDefinition;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoSubscription;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.rp.ResourceProviderFactory;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider;
import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor;
import ca.uhn.fhir.jpa.bulk.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.config.TestR4Config;
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.data.*;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedCompositeStringUniqueDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamDateDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamQuantityDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamStringDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamTokenDao;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.data.IResourceReindexJobDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.dao.data.ITagDefinitionDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDesignationDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptMapDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptMapGroupElementTargetDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao;
import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDao;
import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDesignationDao;
import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao;
import ca.uhn.fhir.jpa.dao.dstu2.FhirResourceDaoDstu2SearchNoFtTest;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
@ -33,7 +60,6 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
import ca.uhn.fhir.jpa.provider.r4.JpaSystemProviderR4;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.search.warm.ICacheWarmingSvc;
@ -47,7 +73,6 @@ import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.jpa.term.api.ITermReadSvcR4;
import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.jpa.api.rp.ResourceProviderFactory;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.Constants;
@ -64,11 +89,63 @@ import org.hibernate.search.jpa.Search;
import org.hl7.fhir.common.hapi.validation.support.CachingValidationSupport;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.AllergyIntolerance;
import org.hl7.fhir.r4.model.Appointment;
import org.hl7.fhir.r4.model.AuditEvent;
import org.hl7.fhir.r4.model.Binary;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.CarePlan;
import org.hl7.fhir.r4.model.CareTeam;
import org.hl7.fhir.r4.model.ChargeItem;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Communication;
import org.hl7.fhir.r4.model.CommunicationRequest;
import org.hl7.fhir.r4.model.CompartmentDefinition;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ConceptMap.ConceptMapGroupComponent;
import org.hl7.fhir.r4.model.ConceptMap.SourceElementComponent;
import org.hl7.fhir.r4.model.ConceptMap.TargetElementComponent;
import org.hl7.fhir.r4.model.Condition;
import org.hl7.fhir.r4.model.Consent;
import org.hl7.fhir.r4.model.Coverage;
import org.hl7.fhir.r4.model.Device;
import org.hl7.fhir.r4.model.DiagnosticReport;
import org.hl7.fhir.r4.model.DocumentReference;
import org.hl7.fhir.r4.model.Encounter;
import org.hl7.fhir.r4.model.Enumerations.ConceptMapEquivalence;
import org.hl7.fhir.r4.model.EpisodeOfCare;
import org.hl7.fhir.r4.model.Group;
import org.hl7.fhir.r4.model.Immunization;
import org.hl7.fhir.r4.model.ImmunizationRecommendation;
import org.hl7.fhir.r4.model.Location;
import org.hl7.fhir.r4.model.Media;
import org.hl7.fhir.r4.model.Medication;
import org.hl7.fhir.r4.model.MedicationAdministration;
import org.hl7.fhir.r4.model.MedicationRequest;
import org.hl7.fhir.r4.model.Meta;
import org.hl7.fhir.r4.model.MolecularSequence;
import org.hl7.fhir.r4.model.NamingSystem;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.OperationDefinition;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Practitioner;
import org.hl7.fhir.r4.model.PractitionerRole;
import org.hl7.fhir.r4.model.Procedure;
import org.hl7.fhir.r4.model.Provenance;
import org.hl7.fhir.r4.model.Questionnaire;
import org.hl7.fhir.r4.model.QuestionnaireResponse;
import org.hl7.fhir.r4.model.RiskAssessment;
import org.hl7.fhir.r4.model.SearchParameter;
import org.hl7.fhir.r4.model.ServiceRequest;
import org.hl7.fhir.r4.model.StructureDefinition;
import org.hl7.fhir.r4.model.Subscription;
import org.hl7.fhir.r4.model.Substance;
import org.hl7.fhir.r4.model.Task;
import org.hl7.fhir.r4.model.UriType;
import org.hl7.fhir.r4.model.ValueSet;
import org.hl7.fhir.r5.utils.IResourceValidator;
import org.junit.After;
import org.junit.AfterClass;
@ -102,6 +179,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
private static IValidationSupport ourJpaValidationSupportChainR4;
private static IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept> ourValueSetDao;
@Autowired
protected IPartitionLookupSvc myPartitionConfigSvc;
@Autowired
@ -375,7 +453,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
private PerformanceTracingLoggingInterceptor myPerformanceTracingLoggingInterceptor;
private List<Object> mySystemInterceptors;
@Autowired
private DaoRegistry myDaoRegistry;
protected DaoRegistry myDaoRegistry;
@Autowired
private IBulkDataExportSvc myBulkDataExportSvc;
@Autowired
@ -408,8 +486,6 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
BaseTermReadSvcImpl.clearOurLastResultsFromTranslationWithReverseCache();
TermDeferredStorageSvcImpl termDeferredStorageSvc = AopTestUtils.getTargetObject(myTerminologyDeferredStorageSvc);
termDeferredStorageSvc.clearDeferred();
myIdHelperService.clearCache();
}
@After()

View File

@ -316,7 +316,6 @@ public class ConsentEventsDaoR4Test extends BaseJpaR4SystemTest {
ourLog.info("Search UUID: {}", outcome.getUuid());
// Fetch the first 10 (don't cross a fetch boundary)
outcome = myPagingProvider.retrieveResultList(mySrd, outcome.getUuid());
List<IBaseResource> resources = outcome.getResources(0, 10);
List<String> returnedIdValues = toUnqualifiedVersionlessIdValues(resources);
ourLog.info("Returned values: {}", returnedIdValues);

View File

@ -2,8 +2,11 @@ package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.SqlQuery;
import ca.uhn.fhir.jpa.util.TestUtil;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.ReferenceParam;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
@ -12,6 +15,8 @@ import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import java.util.List;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
@ -224,6 +229,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {
myCaptureQueriesListener.logAllQueriesForCurrentThread();
// select: lookup forced ID
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertNoPartitionSelectors();
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
// insert to: HFJ_RESOURCE, HFJ_RES_VER, HFJ_RES_LINK
assertEquals(3, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
@ -246,6 +252,127 @@ public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {
}
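/**
 * Asserts that none of the captured SELECT statements filter on PARTITION_ID, i.e. that
 * partition selectors do not leak into the SQL generated for these non-partitioned tests.
 */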
public void assertNoPartitionSelectors() {
List<SqlQuery> selectQueries = myCaptureQueriesListener.getSelectQueriesForCurrentThread();
for (SqlQuery next : selectQueries) {
String sql = next.getSql(true, true).toLowerCase();
assertEquals(0, StringUtils.countMatches(sql, "partition_id is null"));
assertEquals(0, StringUtils.countMatches(sql, "partition_id="));
assertEquals(0, StringUtils.countMatches(sql, "partition_id ="));
}
}
@Test
public void testHistory_Server() {
runInTransaction(() -> {
Patient p = new Patient();
p.setId("A");
p.addIdentifier().setSystem("urn:system").setValue("1");
myPatientDao.update(p).getId().toUnqualified();
p = new Patient();
p.setId("B");
p.addIdentifier().setSystem("urn:system").setValue("2");
myPatientDao.update(p).getId().toUnqualified();
p = new Patient();
p.addIdentifier().setSystem("urn:system").setValue("2");
myPatientDao.create(p).getId().toUnqualified();
});
myCaptureQueriesListener.clear();
runInTransaction(() -> {
IBundleProvider history = mySystemDao.history(null, null, null);
assertEquals(3, history.getResources(0, 99).size());
});
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
// Perform count, Search history table, resolve forced IDs
assertEquals(3, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertNoPartitionSelectors();
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
// Second time should leverage forced ID cache
myCaptureQueriesListener.clear();
runInTransaction(() -> {
IBundleProvider history = mySystemDao.history(null, null, null);
assertEquals(3, history.getResources(0, 99).size());
});
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
// Perform count, Search history table
assertEquals(2, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
}
/**
* This could definitely be optimized further, since we currently load tags individually
* for each resource
*/
@Test
public void testHistory_Server_WithTags() {
runInTransaction(() -> {
Patient p = new Patient();
p.getMeta().addTag("system", "code1", "displaY1");
p.getMeta().addTag("system", "code2", "displaY2");
p.setId("A");
p.addIdentifier().setSystem("urn:system").setValue("1");
myPatientDao.update(p).getId().toUnqualified();
p = new Patient();
p.getMeta().addTag("system", "code1", "displaY1");
p.getMeta().addTag("system", "code2", "displaY2");
p.setId("B");
p.addIdentifier().setSystem("urn:system").setValue("2");
myPatientDao.update(p).getId().toUnqualified();
p = new Patient();
p.getMeta().addTag("system", "code1", "displaY1");
p.getMeta().addTag("system", "code2", "displaY2");
p.addIdentifier().setSystem("urn:system").setValue("2");
myPatientDao.create(p).getId().toUnqualified();
});
myCaptureQueriesListener.clear();
runInTransaction(() -> {
IBundleProvider history = mySystemDao.history(null, null, null);
assertEquals(3, history.getResources(0, 3).size());
});
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
// Perform count, Search history table, resolve forced IDs, load tags (x3)
assertEquals(6, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
// Second time should leverage forced ID cache
myCaptureQueriesListener.clear();
runInTransaction(() -> {
IBundleProvider history = mySystemDao.history(null, null, null);
assertEquals(3, history.getResources(0, 3).size());
});
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
// Perform count, Search history table, load tags (x3)
assertEquals(5, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
}
@Test
public void testSearchUsingForcedIdReference() {
@ -267,6 +394,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {
assertEquals(1, myObservationDao.search(map).size().intValue());
// Resolve forced ID, Perform search, load result
assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertNoPartitionSelectors();
assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

View File

@ -7,6 +7,7 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamNumber;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity;
@ -16,6 +17,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap.EverythingModeEnum;
@ -26,7 +28,27 @@ import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.rest.param.CompositeParam;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.HasAndListParam;
import ca.uhn.fhir.rest.param.HasOrListParam;
import ca.uhn.fhir.rest.param.HasParam;
import ca.uhn.fhir.rest.param.NumberParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.rest.param.QuantityParam;
import ca.uhn.fhir.rest.param.ReferenceAndListParam;
import ca.uhn.fhir.rest.param.ReferenceOrListParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringAndListParam;
import ca.uhn.fhir.rest.param.StringOrListParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenAndListParam;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.TokenParamModifier;
import ca.uhn.fhir.rest.param.UriParam;
import ca.uhn.fhir.rest.param.UriParamQualifierEnum;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
import com.google.common.collect.Lists;
@ -36,15 +58,66 @@ import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.Age;
import org.hl7.fhir.r4.model.Appointment;
import org.hl7.fhir.r4.model.AuditEvent;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.r4.model.Bundle.BundleType;
import org.hl7.fhir.r4.model.Bundle.HTTPVerb;
import org.hl7.fhir.r4.model.CareTeam;
import org.hl7.fhir.r4.model.ChargeItem;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.CommunicationRequest;
import org.hl7.fhir.r4.model.Condition;
import org.hl7.fhir.r4.model.ContactPoint.ContactPointSystem;
import org.hl7.fhir.r4.model.DateTimeType;
import org.hl7.fhir.r4.model.DateType;
import org.hl7.fhir.r4.model.DecimalType;
import org.hl7.fhir.r4.model.Device;
import org.hl7.fhir.r4.model.DiagnosticReport;
import org.hl7.fhir.r4.model.Encounter;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.Enumerations.AdministrativeGender;
import org.hl7.fhir.r4.model.EpisodeOfCare;
import org.hl7.fhir.r4.model.Group;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.InstantType;
import org.hl7.fhir.r4.model.IntegerType;
import org.hl7.fhir.r4.model.Location;
import org.hl7.fhir.r4.model.Medication;
import org.hl7.fhir.r4.model.MedicationAdministration;
import org.hl7.fhir.r4.model.MedicationRequest;
import org.hl7.fhir.r4.model.MolecularSequence;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Observation.ObservationStatus;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Period;
import org.hl7.fhir.r4.model.Practitioner;
import org.hl7.fhir.r4.model.Provenance;
import org.hl7.fhir.r4.model.Quantity;
import org.hl7.fhir.r4.model.Questionnaire;
import org.hl7.fhir.r4.model.QuestionnaireResponse;
import org.hl7.fhir.r4.model.Range;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.RiskAssessment;
import org.hl7.fhir.r4.model.SearchParameter;
import org.hl7.fhir.r4.model.ServiceRequest;
import org.hl7.fhir.r4.model.SimpleQuantity;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.StructureDefinition;
import org.hl7.fhir.r4.model.Subscription;
import org.hl7.fhir.r4.model.Subscription.SubscriptionChannelType;
import org.hl7.fhir.r4.model.Subscription.SubscriptionStatus;
import org.hl7.fhir.r4.model.Substance;
import org.hl7.fhir.r4.model.Task;
import org.hl7.fhir.r4.model.Timing;
import org.hl7.fhir.r4.model.ValueSet;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@ -108,6 +181,7 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
@Before
public void beforeDisableCacheReuse() {
myModelConfig.setSuppressStringIndexingInTokens(new ModelConfig().isSuppressStringIndexingInTokens());
myDaoConfig.setReuseCachedSearchResultsForMillis(null);
}
@ -4353,15 +4427,58 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
assertEquals(1, results.getResources(0, 10).size());
// We expect a new one because we don't cache the search URL for very long search URLs
assertEquals(2, mySearchEntityDao.count());
}
@Test
public void testTokenTextDisabled_Global() {
myModelConfig.setSuppressStringIndexingInTokens(true);
SearchParameterMap map = new SearchParameterMap();
map.setLoadSynchronous(true);
map.add(Observation.SP_CODE, new TokenParam("hello").setModifier(TokenParamModifier.TEXT));
try {
myObservationDao.search(map);
fail(); // the search above should have been rejected
} catch (MethodNotAllowedException e) {
assertEquals("The :text modifier is disabled on this server", e.getMessage());
}
}
@Test
public void testTokenTextDisabled_ForSearchParam() {
{
SearchParameter sp = new SearchParameter();
sp.setId("observation-code");
sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
sp.addBase("Observation");
sp.setType(Enumerations.SearchParamType.TOKEN);
sp.setCode("code");
sp.setExpression("Observation.code");
sp.addExtension()
.setUrl(JpaConstants.EXT_SEARCHPARAM_TOKEN_SUPPRESS_TEXT_INDEXING)
.setValue(new BooleanType(true));
ourLog.info("SP:\n{}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(sp));
mySearchParameterDao.update(sp);
mySearchParamRegistry.forceRefresh();
}
SearchParameterMap map = new SearchParameterMap();
map.setLoadSynchronous(true);
map.add(Observation.SP_CODE, new TokenParam("hello").setModifier(TokenParamModifier.TEXT));
try {
myObservationDao.search(map);
fail(); // the search above should have been rejected
} catch (MethodNotAllowedException e) {
assertEquals("The :text modifier is disabled for this search parameter", e.getMessage());
}
}
@Test
public void testDateSearchParametersShouldBeTimezoneIndependent() {
createObservationWithEffective("NO1", "2011-01-02T23:00:00-11:30");
createObservationWithEffective("NO2", "2011-01-03T00:00:00+01:00");
createObservationWithEffective("NO1", "2011-01-03T00:00:00+01:00");
createObservationWithEffective("YES00", "2011-01-02T23:00:00-11:30");
createObservationWithEffective("YES01", "2011-01-02T00:00:00-11:30");
createObservationWithEffective("YES02", "2011-01-02T00:00:00-10:00");
createObservationWithEffective("YES03", "2011-01-02T00:00:00-09:00");
@ -4394,6 +4511,7 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
List<String> values = toUnqualifiedVersionlessIdValues(results);
Collections.sort(values);
assertThat(values.toString(), values, contains(
"Observation/YES00",
"Observation/YES01",
"Observation/YES02",
"Observation/YES03",
@ -4420,6 +4538,68 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
));
}
@Test
public void testDateSearchParametersShouldBeHourIndependent() {
createObservationWithEffective("YES01", "2011-01-02T00:00:00");
createObservationWithEffective("YES02", "2011-01-02T01:00:00");
createObservationWithEffective("YES03", "2011-01-02T02:00:00");
createObservationWithEffective("YES04", "2011-01-02T03:00:00");
createObservationWithEffective("YES05", "2011-01-02T04:00:00");
createObservationWithEffective("YES06", "2011-01-02T05:00:00");
createObservationWithEffective("YES07", "2011-01-02T06:00:00");
createObservationWithEffective("YES08", "2011-01-02T07:00:00");
createObservationWithEffective("YES09", "2011-01-02T08:00:00");
createObservationWithEffective("YES10", "2011-01-02T09:00:00");
createObservationWithEffective("YES11", "2011-01-02T10:00:00");
createObservationWithEffective("YES12", "2011-01-02T11:00:00");
createObservationWithEffective("YES13", "2011-01-02T12:00:00");
createObservationWithEffective("YES14", "2011-01-02T13:00:00");
createObservationWithEffective("YES15", "2011-01-02T14:00:00");
createObservationWithEffective("YES16", "2011-01-02T15:00:00");
createObservationWithEffective("YES17", "2011-01-02T16:00:00");
createObservationWithEffective("YES18", "2011-01-02T17:00:00");
createObservationWithEffective("YES19", "2011-01-02T18:00:00");
createObservationWithEffective("YES20", "2011-01-02T19:00:00");
createObservationWithEffective("YES21", "2011-01-02T20:00:00");
createObservationWithEffective("YES22", "2011-01-02T21:00:00");
createObservationWithEffective("YES23", "2011-01-02T22:00:00");
createObservationWithEffective("YES24", "2011-01-02T23:00:00");
SearchParameterMap map = new SearchParameterMap();
map.setLoadSynchronous(true);
map.add(Observation.SP_DATE, new DateParam("2011-01-02"));
IBundleProvider results = myObservationDao.search(map);
List<String> values = toUnqualifiedVersionlessIdValues(results);
Collections.sort(values);
assertThat(values.toString(), values, contains(
"Observation/YES01",
"Observation/YES02",
"Observation/YES03",
"Observation/YES04",
"Observation/YES05",
"Observation/YES06",
"Observation/YES07",
"Observation/YES08",
"Observation/YES09",
"Observation/YES10",
"Observation/YES11",
"Observation/YES12",
"Observation/YES13",
"Observation/YES14",
"Observation/YES15",
"Observation/YES16",
"Observation/YES17",
"Observation/YES18",
"Observation/YES19",
"Observation/YES20",
"Observation/YES21",
"Observation/YES22",
"Observation/YES23",
"Observation/YES24"
));
}
private void createObservationWithEffective(String theId, String theEffective) {
Observation obs = new Observation();
obs.setId(theId);

Some files were not shown because too many files have changed in this diff.
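
The hunk above is cut off before the createObservationWithEffective helper finishes. For reference only, a minimal sketch of how such a helper is typically completed in these tests — assuming the Observation#setEffective(DateTimeType) setter and the single-argument DAO update used elsewhere in this class, not the exact lines of this commit — looks like:

   private void createObservationWithEffective(String theId, String theEffective) {
      Observation obs = new Observation();
      obs.setId(theId);
      // Assumed completion: record the effective timestamp so the date-search tests above can match it
      obs.setEffective(new DateTimeType(theEffective));
      myObservationDao.update(obs);
   }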