Merge branch 'master' into ja_20240604_allow_disable_param

commit b094163e53

@@ -10,3 +10,4 @@ distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
+

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>7.3.10-SNAPSHOT</version>
+		<version>7.5.0-SNAPSHOT</version>

 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.3.10-SNAPSHOT</version>
+		<version>7.5.0-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.3.10-SNAPSHOT</version>
+		<version>7.5.0-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -1071,8 +1071,9 @@ public interface IValidationSupport {
 			}
 		}

-		public void setErrorMessage(String theErrorMessage) {
+		public LookupCodeResult setErrorMessage(String theErrorMessage) {
 			myErrorMessage = theErrorMessage;
+			return this;
 		}

 		public String getErrorMessage() {

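The changed setter now returns its owner, enabling fluent chaining. A minimal usage sketch (not part of the commit; it assumes LookupCodeResult is constructed directly via its public no-arg constructor):

    IValidationSupport.LookupCodeResult lookupResult = new IValidationSupport.LookupCodeResult()
            .setErrorMessage("Code 1234 was not found in the given system");
    String message = lookupResult.getErrorMessage(); // "Code 1234 was not found in the given system"
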
@@ -2515,6 +2515,22 @@ public enum Pointcut implements IPointcut {
 	MDM_SUBMIT(
 			void.class, "ca.uhn.fhir.rest.api.server.RequestDetails", "ca.uhn.fhir.mdm.model.mdmevents.MdmSubmitEvent"),

+	/**
+	 * <b>MDM_SUBMIT_PRE_MESSAGE_DELIVERY Hook:</b>
+	 * Invoked immediately before the delivery of a MESSAGE to the broker.
+	 * <p>
+	 * Hooks can make changes to the delivery payload.
+	 * Furthermore, modifications can be made to the outgoing message,
+	 * for example adding headers or changing the message key,
+	 * which will be used for the subsequent processing.
+	 * </p>
+	 * Hooks should accept the following parameters:
+	 * <ul>
+	 * <li>ca.uhn.fhir.jpa.subscription.model.ResourceModifiedJsonMessage</li>
+	 * </ul>
+	 */
+	MDM_SUBMIT_PRE_MESSAGE_DELIVERY(void.class, "ca.uhn.fhir.jpa.subscription.model.ResourceModifiedJsonMessage"),
+
 	/**
 	 * <b>JPA Hook:</b>
 	 * This hook is invoked when a cross-partition reference is about to be

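A hedged sketch of how a consumer might subscribe to the new pointcut, assuming HAPI's standard @Interceptor/@Hook registration from ca.uhn.fhir.interceptor.api (the interceptor class and method names here are hypothetical):

    @Interceptor
    public class MdmPreDeliveryInterceptor {

        // The parameter list mirrors the hook's declared parameter type.
        @Hook(Pointcut.MDM_SUBMIT_PRE_MESSAGE_DELIVERY)
        public void beforeMdmMessageDelivery(ResourceModifiedJsonMessage theMessage) {
            // Inspect or modify the outgoing payload here (e.g. attach headers
            // or change the message key) before it is handed to the broker.
        }
    }
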
@@ -40,6 +40,7 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Objects;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;

 import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;

@@ -98,6 +99,33 @@ public class RequestPartitionId implements IModelJson {
 		myAllPartitions = true;
 	}

+	/**
+	 * Creates a new RequestPartitionId which includes all partition IDs from
+	 * this {@link RequestPartitionId} but also includes all IDs from the given
+	 * {@link RequestPartitionId}. Any duplicates are only included once, and
+	 * partition names and dates are ignored and not returned. This {@link RequestPartitionId}
+	 * and {@literal theOther} are not modified.
+	 *
+	 * @since 7.4.0
+	 */
+	public RequestPartitionId mergeIds(RequestPartitionId theOther) {
+		if (isAllPartitions() || theOther.isAllPartitions()) {
+			return RequestPartitionId.allPartitions();
+		}
+
+		// don't know why this is required - otherwise PartitionedStrictTransactionR4Test fails
+		if (this.equals(theOther)) {
+			return this;
+		}
+
+		List<Integer> thisPartitionIds = getPartitionIds();
+		List<Integer> otherPartitionIds = theOther.getPartitionIds();
+		List<Integer> newPartitionIds = Stream.concat(thisPartitionIds.stream(), otherPartitionIds.stream())
+				.distinct()
+				.collect(Collectors.toList());
+		return RequestPartitionId.fromPartitionIds(newPartitionIds);
+	}
+
 	public static RequestPartitionId fromJson(String theJson) throws JsonProcessingException {
 		return ourObjectMapper.readValue(theJson, RequestPartitionId.class);
 	}

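A short usage sketch, grounded in the method above and the tests added later in this commit:

    RequestPartitionId merged = RequestPartitionId.fromPartitionIds(1, 2, 3)
            .mergeIds(RequestPartitionId.fromPartitionIds(1, 2, 4));
    // merged contains partition IDs [1, 2, 3, 4]; names and dates are dropped
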
@@ -332,6 +360,14 @@ public class RequestPartitionId implements IModelJson {
 		return new RequestPartitionId(thePartitionNames, thePartitionIds, thePartitionDate);
 	}

+	public static boolean isDefaultPartition(@Nullable RequestPartitionId thePartitionId) {
+		if (thePartitionId == null) {
+			return false;
+		}
+
+		return thePartitionId.isDefaultPartition();
+	}
+
 	/**
 	 * Create a string representation suitable for use as a cache key. Null aware.
 	 * <p>

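The static overload is null-safe, so callers can drop their explicit null checks (sketch; thePartitionId stands for any possibly-null reference in the caller):

    boolean isDefault = RequestPartitionId.isDefaultPartition(thePartitionId);
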
@@ -230,17 +230,41 @@ public abstract class BaseDateTimeDt extends BasePrimitive<Date> {
 		return Long.parseLong(retVal);
 	}

+	/**
+	 * Find the offset for a timestamp, if it exists. An offset may start with '-', 'Z', '+', or ' '.
+	 * <p/>
+	 * There is a special case where ' ' is considered a valid offset initial character, because when
+	 * handling URLs with timestamps, '+' is considered an escape character for ' ', so '+' may have been replaced with
+	 * ' ' by the time execution reaches this method. This is why this method handles both characters.
+	 *
+	 * @param theValueString A timestamp containing either a timezone offset or nothing.
+	 * @return The index of the offset portion of the timestamp, if applicable, otherwise -1
+	 */
 	private int getOffsetIndex(String theValueString) {
 		int plusIndex = theValueString.indexOf('+', 16);
+		int spaceIndex = theValueString.indexOf(' ', 16);
 		int minusIndex = theValueString.indexOf('-', 16);
 		int zIndex = theValueString.indexOf('Z', 16);
-		int retVal = Math.max(Math.max(plusIndex, minusIndex), zIndex);
-		if (retVal == -1) {
+		int maxIndexPlusAndMinus = Math.max(Math.max(plusIndex, minusIndex), zIndex);
+		int maxIndexSpaceAndMinus = Math.max(Math.max(spaceIndex, minusIndex), zIndex);
+		if (maxIndexPlusAndMinus == -1 && maxIndexSpaceAndMinus == -1) {
 			return -1;
 		}
-		if ((retVal - 2) != (plusIndex + minusIndex + zIndex)) {
-			throwBadDateFormat(theValueString);
+		int retVal = 0;
+		if (maxIndexPlusAndMinus != -1) {
+			if ((maxIndexPlusAndMinus - 2) != (plusIndex + minusIndex + zIndex)) {
+				throwBadDateFormat(theValueString);
+			}
+			retVal = maxIndexPlusAndMinus;
 		}
+
+		if (maxIndexSpaceAndMinus != -1) {
+			if ((maxIndexSpaceAndMinus - 2) != (spaceIndex + minusIndex + zIndex)) {
+				throwBadDateFormat(theValueString);
+			}
+			retVal = maxIndexSpaceAndMinus;
+		}
+
 		return retVal;
 	}

@@ -574,13 +598,15 @@ public abstract class BaseDateTimeDt extends BasePrimitive<Date> {
 			setTimeZoneZulu(true);
 		} else if (theValue.length() != 6) {
 			throwBadDateFormat(theWholeValue, "Timezone offset must be in the form \"Z\", \"-HH:mm\", or \"+HH:mm\"");
-		} else if (theValue.charAt(3) != ':' || !(theValue.charAt(0) == '+' || theValue.charAt(0) == '-')) {
+		} else if (theValue.charAt(3) != ':'
+				|| !(theValue.charAt(0) == '+' || theValue.charAt(0) == ' ' || theValue.charAt(0) == '-')) {
 			throwBadDateFormat(theWholeValue, "Timezone offset must be in the form \"Z\", \"-HH:mm\", or \"+HH:mm\"");
 		} else {
 			parseInt(theWholeValue, theValue.substring(1, 3), 0, 23);
 			parseInt(theWholeValue, theValue.substring(4, 6), 0, 59);
 			clearTimeZone();
-			setTimeZone(getTimeZone("GMT" + theValue));
+			final String valueToUse = theValue.startsWith(" ") ? theValue.replace(' ', '+') : theValue;
+			setTimeZone(getTimeZone("GMT" + valueToUse));
 		}

 		return this;

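Why a leading space must be tolerated here: in application/x-www-form-urlencoded data, '+' encodes a space, so a "+HH:mm" offset arriving in a URL may already have been decoded to " HH:mm". A small illustration using only the standard JDK (the timestamp value is made up):

    import java.net.URLDecoder;
    import java.nio.charset.StandardCharsets;

    String raw = "2024-06-04T10:00:00+01:00";
    String decoded = URLDecoder.decode(raw, StandardCharsets.UTF_8);
    // decoded is "2024-06-04T10:00:00 01:00" - the '+' became a space,
    // which is exactly the case the parser above now normalizes back to '+'.
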
@@ -0,0 +1,696 @@
/*-
 * #%L
 * HAPI FHIR - Core Library
 * %%
 * Copyright (C) 2014 - 2024 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.repository;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.server.exceptions.AuthenticationException;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.NotImplementedOperationException;
import com.google.common.annotations.Beta;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseConformance;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;

import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * <p>
 * This API is undergoing active development, so it should be considered beta-level.
 * </p>
 *
 * <p>
 * This interface is a Java rendition of the FHIR REST API. All FHIR operations are defined at the
 * HTTP level, which is convenient from the specification point-of-view since FHIR is built on top
 * of web standards. This does mean that a few HTTP-specific considerations, such as transmitting
 * side-band information through the HTTP headers, bleed into this API.
 * </p>
 *
 * <p>
 * One particularly odd case is FHIR Bundle links. The specification describes these as opaque to
 * the end-user, so a given FHIR repository implementation must be able to resolve those directly.
 * See {@link Repository#link(Class, String)}
 * </p>
 *
 * <p>
 * This interface also chooses to ignore return headers for most cases, preferring to return the
 * Java objects directly. In cases where this is not possible, or the additional headers are crucial
 * information, HAPI's {@link MethodOutcome} is used.
 * </p>
 *
 * <p>
 * Implementations of this interface should prefer to throw the exceptions derived from
 * {@link ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException}
 *
 * All operations may throw {@link AuthenticationException}, {@link ForbiddenOperationException}, or
 * {@link InternalErrorException} in addition to operation-specific exceptions.
 * </p>
 *
 * <p>
 * If a given operation is not supported, implementations should throw a
 * {@link NotImplementedOperationException}. The capabilities operation, if supported, should return
 * the set of supported interactions. If capabilities is not supported, the components in this
 * repository will try to invoke operations with "sensible" defaults. For example, by using the
 * standard FHIR search parameters. Discussion is ongoing to determine what a "sensible" minimal
 * level of support for interactions should be.
 * </p>
 *
 * @see <a href="https://www.hl7.org/fhir/http.html">FHIR REST API</a>
 */
@Beta
public interface Repository {

	// CRUD starts here

	/**
	 * Reads a resource from the repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#read">FHIR read</a>
	 * @see <a href="https://www.hl7.org/fhir/http.html#vread">FHIR vRead</a>
	 *
	 * @param <T> a Resource type
	 * @param <I> an Id type
	 * @param resourceType the class of the Resource type to read
	 * @param id the id of the Resource to read
	 * @return the Resource
	 */
	default <T extends IBaseResource, I extends IIdType> T read(Class<T> resourceType, I id) {
		return this.read(resourceType, id, Collections.emptyMap());
	}

	/**
	 * Reads a Resource from the repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#read">FHIR read</a>
	 * @see <a href="https://www.hl7.org/fhir/http.html#vread">FHIR vRead</a>
	 *
	 * @param <T> a Resource type
	 * @param <I> an Id type
	 * @param resourceType the class of the Resource type to read
	 * @param id the id of the Resource to read
	 * @param headers headers for this request, typically key-value pairs of HTTP headers
	 * @return the Resource
	 */
	<T extends IBaseResource, I extends IIdType> T read(Class<T> resourceType, I id, Map<String, String> headers);

	/**
	 * Creates a Resource in the repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#create">FHIR create</a>
	 *
	 * @param <T> a Resource type
	 * @param resource the Resource to create
	 * @return a MethodOutcome with the id of the created Resource
	 */
	default <T extends IBaseResource> MethodOutcome create(T resource) {
		return this.create(resource, Collections.emptyMap());
	}

	/**
	 * Creates a Resource in the repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#create">FHIR create</a>
	 *
	 * @param <T> a Resource type
	 * @param resource the Resource to create
	 * @param headers headers for this request, typically key-value pairs of HTTP headers
	 * @return a MethodOutcome with the id of the created Resource
	 */
	<T extends IBaseResource> MethodOutcome create(T resource, Map<String, String> headers);

	/**
	 * Patches a Resource in the repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#patch">FHIR patch</a>
	 *
	 * @param <I> an Id type
	 * @param <P> a Parameters type
	 * @param id the id of the Resource to patch
	 * @param patchParameters parameters describing the patches to apply
	 * @return a MethodOutcome with the id of the patched resource
	 */
	default <I extends IIdType, P extends IBaseParameters> MethodOutcome patch(I id, P patchParameters) {
		return this.patch(id, patchParameters, Collections.emptyMap());
	}

	/**
	 * Patches a Resource in the repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#patch">FHIR patch</a>
	 *
	 * @param <I> an Id type
	 * @param <P> a Parameters type
	 * @param id the id of the Resource to patch
	 * @param patchParameters parameters describing the patches to apply
	 * @param headers headers for this request, typically key-value pairs of HTTP headers
	 * @return a MethodOutcome with the id of the patched resource
	 */
	default <I extends IIdType, P extends IBaseParameters> MethodOutcome patch(
			I id, P patchParameters, Map<String, String> headers) {
		return throwNotImplementedOperationException("patch is not supported by this repository");
	}

	/**
	 * Updates a Resource in the repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#update">FHIR update</a>
	 *
	 * @param <T> a Resource type
	 * @param resource the Resource to update
	 * @return a MethodOutcome with the id of the updated Resource
	 */
	default <T extends IBaseResource> MethodOutcome update(T resource) {
		return this.update(resource, Collections.emptyMap());
	}

	/**
	 * Updates a Resource in the repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#update">FHIR update</a>
	 *
	 * @param <T> a Resource type
	 * @param resource the Resource to update
	 * @param headers headers for this request, typically key-value pairs of HTTP headers
	 * @return a MethodOutcome with the id of the updated Resource
	 */
	<T extends IBaseResource> MethodOutcome update(T resource, Map<String, String> headers);

	/**
	 * Deletes a Resource in the repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#delete">FHIR delete</a>
	 *
	 * @param <T> a Resource type
	 * @param <I> an Id type
	 * @param resourceType the class of the Resource type to delete
	 * @param id the id of the Resource to delete
	 * @return a MethodOutcome with the id of the deleted resource
	 */
	default <T extends IBaseResource, I extends IIdType> MethodOutcome delete(Class<T> resourceType, I id) {
		return this.delete(resourceType, id, Collections.emptyMap());
	}

	/**
	 * Deletes a Resource in the repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#delete">FHIR delete</a>
	 *
	 * @param <T> a Resource type
	 * @param <I> an Id type
	 * @param resourceType the class of the Resource type to delete
	 * @param id the id of the Resource to delete
	 * @param headers headers for this request, typically key-value pairs of HTTP headers
	 * @return a MethodOutcome with the id of the deleted resource
	 */
	<T extends IBaseResource, I extends IIdType> MethodOutcome delete(
			Class<T> resourceType, I id, Map<String, String> headers);

	// Querying starts here

	/**
	 * Searches this repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#search">FHIR search</a>
	 *
	 * @param <B> a Bundle type
	 * @param <T> a Resource type
	 * @param bundleType the class of the Bundle type to return
	 * @param resourceType the class of the Resource type to search
	 * @param searchParameters the searchParameters for this search
	 * @return a Bundle with the results of the search
	 */
	default <B extends IBaseBundle, T extends IBaseResource> B search(
			Class<B> bundleType, Class<T> resourceType, Map<String, List<IQueryParameterType>> searchParameters) {
		return this.search(bundleType, resourceType, searchParameters, Collections.emptyMap());
	}

	/**
	 * Searches this repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#search">FHIR search</a>
	 *
	 * @param <B> a Bundle type
	 * @param <T> a Resource type
	 * @param bundleType the class of the Bundle type to return
	 * @param resourceType the class of the Resource type to search
	 * @param searchParameters the searchParameters for this search
	 * @param headers headers for this request, typically key-value pairs of HTTP headers
	 * @return a Bundle with the results of the search
	 */
	<B extends IBaseBundle, T extends IBaseResource> B search(
			Class<B> bundleType,
			Class<T> resourceType,
			Map<String, List<IQueryParameterType>> searchParameters,
			Map<String, String> headers);

	// Paging starts here

	/**
	 * Reads a Bundle from a link on this repository
	 *
	 * This is typically used for paging during searches
	 *
	 * @see <a href="https://www.hl7.org/fhir/bundle-definitions.html#Bundle.link">FHIR Bundle
	 *     link</a>
	 *
	 * @param <B> a Bundle type
	 * @param url the url of the Bundle to load
	 * @return a Bundle
	 */
	default <B extends IBaseBundle> B link(Class<B> bundleType, String url) {
		return this.link(bundleType, url, Collections.emptyMap());
	}

	/**
	 * Reads a Bundle from a link on this repository
	 *
	 * This is typically used for paging during searches
	 *
	 * @see <a href="https://www.hl7.org/fhir/bundle-definitions.html#Bundle.link">FHIR Bundle
	 *     link</a>
	 *
	 * @param <B> a Bundle type
	 * @param url the url of the Bundle to load
	 * @param headers headers for this request, typically key-value pairs of HTTP headers
	 * @return a Bundle
	 */
	default <B extends IBaseBundle> B link(Class<B> bundleType, String url, Map<String, String> headers) {
		return throwNotImplementedOperationException("link is not supported by this repository");
	}

	// Metadata starts here

	/**
	 * Returns the CapabilityStatement/Conformance metadata for this repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#capabilities">FHIR capabilities</a>
	 *
	 * @param <C> a CapabilityStatement/Conformance type
	 * @param resourceType the class of the CapabilityStatement/Conformance to return
	 * @return a CapabilityStatement/Conformance with the repository's metadata
	 */
	default <C extends IBaseConformance> C capabilities(Class<C> resourceType) {
		return this.capabilities(resourceType, Collections.emptyMap());
	}

	/**
	 * Returns the CapabilityStatement/Conformance metadata for this repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#capabilities">FHIR capabilities</a>
	 *
	 * @param <C> a CapabilityStatement/Conformance type
	 * @param resourceType the class of the CapabilityStatement/Conformance to return
	 * @param headers headers for this request, typically key-value pairs of HTTP headers
	 * @return a CapabilityStatement/Conformance with the repository's metadata
	 */
	default <C extends IBaseConformance> C capabilities(Class<C> resourceType, Map<String, String> headers) {
		return throwNotImplementedOperationException("capabilities is not supported by this repository");
	}

	// Transactions starts here

	/**
	 * Performs a transaction or batch on this repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#transaction">FHIR transaction</a>
	 *
	 * @param <B> a Bundle type
	 * @param transaction a Bundle with the transaction/batch
	 * @return a Bundle with the results of the transaction/batch
	 */
	default <B extends IBaseBundle> B transaction(B transaction) {
		return this.transaction(transaction, Collections.emptyMap());
	}

	/**
	 * Performs a transaction or batch on this repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#transaction">FHIR transaction</a>
	 *
	 * @param <B> a Bundle type
	 * @param transaction a Bundle with the transaction/batch
	 * @param headers headers for this request, typically key-value pairs of HTTP headers
	 * @return a Bundle with the results of the transaction/batch
	 */
	default <B extends IBaseBundle> B transaction(B transaction, Map<String, String> headers) {
		return throwNotImplementedOperationException("transaction is not supported by this repository");
	}

	// Operations starts here

	/**
	 * Invokes a server-level operation on this repository that returns a Resource
	 *
	 * @see <a href="https://www.hl7.org/fhir/operations.html">FHIR operations</a>
	 *
	 * @param <R> a Resource type to return
	 * @param <P> a Parameters type for operation parameters
	 * @param name the name of the operation to invoke
	 * @param parameters the operation parameters
	 * @param returnType the class of the Resource the operation returns
	 * @return the results of the operation
	 */
	default <R extends IBaseResource, P extends IBaseParameters> R invoke(
			String name, P parameters, Class<R> returnType) {
		return this.invoke(name, parameters, returnType, Collections.emptyMap());
	}

	/**
	 * Invokes a server-level operation on this repository that returns a Resource
	 *
	 * @see <a href="https://www.hl7.org/fhir/operations.html">FHIR operations</a>
	 *
	 * @param <R> a Resource type to return
	 * @param <P> a Parameters type for operation parameters
	 * @param name the name of the operation to invoke
	 * @param parameters the operation parameters
	 * @param returnType the class of the Resource the operation returns
	 * @param headers headers for this request, typically key-value pairs of HTTP headers
	 * @return the results of the operation
	 */
	default <R extends IBaseResource, P extends IBaseParameters> R invoke(
			String name, P parameters, Class<R> returnType, Map<String, String> headers) {
		return throwNotImplementedOperationException("server-level invoke is not supported by this repository");
	}

	/**
	 * Invokes a server-level operation on this repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/operations.html">FHIR operations</a>
	 *
	 * @param <P> a Parameters type for operation parameters
	 * @param name the name of the operation to invoke
	 * @param parameters the operation parameters
	 * @return a MethodOutcome with a status code
	 */
	default <P extends IBaseParameters> MethodOutcome invoke(String name, P parameters) {
		return this.invoke(name, parameters, Collections.emptyMap());
	}

	/**
	 * Invokes a server-level operation on this repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/operations.html">FHIR operations</a>
	 *
	 * @param <P> a Parameters type for operation parameters
	 * @param name the name of the operation to invoke
	 * @param parameters the operation parameters
	 * @param headers headers for this request, typically key-value pairs of HTTP headers
	 * @return a MethodOutcome with a status code
	 */
	default <P extends IBaseParameters> MethodOutcome invoke(String name, P parameters, Map<String, String> headers) {
		return throwNotImplementedOperationException("server-level invoke is not supported by this repository");
	}

	/**
	 * Invokes a type-level operation on this repository that returns a Resource
	 *
	 * @see <a href="https://www.hl7.org/fhir/operations.html">FHIR operations</a>
	 *
	 * @param <R> a Resource type to return
	 * @param <P> a Parameters type for operation parameters
	 * @param <T> a Resource type to do the invocation for
	 * @param resourceType the class of the Resource to do the invocation for
	 * @param name the name of the operation to invoke
	 * @param parameters the operation parameters
	 * @param returnType the class of the Resource the operation returns
	 * @return the results of the operation
	 */
	default <R extends IBaseResource, P extends IBaseParameters, T extends IBaseResource> R invoke(
			Class<T> resourceType, String name, P parameters, Class<R> returnType) {
		return this.invoke(resourceType, name, parameters, returnType, Collections.emptyMap());
	}

	/**
	 * Invokes a type-level operation on this repository that returns a Resource
	 *
	 * @see <a href="https://www.hl7.org/fhir/operations.html">FHIR operations</a>
	 *
	 * @param <R> a Resource type to return
	 * @param <P> a Parameters type for operation parameters
	 * @param <T> a Resource type to do the invocation for
	 * @param resourceType the class of the Resource to do the invocation for
	 * @param name the name of the operation to invoke
	 * @param parameters the operation parameters
	 * @param headers headers for this request, typically key-value pairs of HTTP headers
	 * @param returnType the class of the Resource the operation returns
	 * @return the results of the operation
	 */
	<R extends IBaseResource, P extends IBaseParameters, T extends IBaseResource> R invoke(
			Class<T> resourceType, String name, P parameters, Class<R> returnType, Map<String, String> headers);

	/**
	 * Invokes a type-level operation on this repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/operations.html">FHIR operations</a>
	 *
	 * @param <P> a Parameters type for operation parameters
	 * @param <T> a Resource type to do the invocation for
	 * @param resourceType the class of the Resource to do the invocation for
	 * @param name the name of the operation to invoke
	 * @param parameters the operation parameters
	 * @return a MethodOutcome with a status code
	 */
	default <P extends IBaseParameters, T extends IBaseResource> MethodOutcome invoke(
			Class<T> resourceType, String name, P parameters) {
		return this.invoke(resourceType, name, parameters, Collections.emptyMap());
	}

	/**
	 * Invokes a type-level operation on this repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/operations.html">FHIR operations</a>
	 *
	 * @param <P> a Parameters type for operation parameters
	 * @param <T> a Resource type to do the invocation for
	 * @param resourceType the class of the Resource to do the invocation for
	 * @param name the name of the operation to invoke
	 * @param parameters the operation parameters
	 * @param headers headers for this request, typically key-value pairs of HTTP headers
	 * @return a MethodOutcome with a status code
	 */
	default <P extends IBaseParameters, T extends IBaseResource> MethodOutcome invoke(
			Class<T> resourceType, String name, P parameters, Map<String, String> headers) {
		return throwNotImplementedOperationException("type-level invoke is not supported by this repository");
	}

	/**
	 * Invokes an instance-level operation on this repository that returns a Resource
	 *
	 * @see <a href="https://www.hl7.org/fhir/operations.html">FHIR operations</a>
	 *
	 * @param <R> a Resource type to return
	 * @param <P> a Parameters type for operation parameters
	 * @param <I> an Id type
	 * @param id the id of the Resource to do the invocation on
	 * @param name the name of the operation to invoke
	 * @param parameters the operation parameters
	 * @param returnType the class of the Resource the operation returns
	 * @return the results of the operation
	 */
	default <R extends IBaseResource, P extends IBaseParameters, I extends IIdType> R invoke(
			I id, String name, P parameters, Class<R> returnType) {
		return this.invoke(id, name, parameters, returnType, Collections.emptyMap());
	}

	/**
	 * Invokes an instance-level operation on this repository that returns a Resource
	 *
	 * @see <a href="https://www.hl7.org/fhir/operations.html">FHIR operations</a>
	 *
	 * @param <R> a Resource type to return
	 * @param <P> a Parameters type for operation parameters
	 * @param <I> an Id type
	 * @param id the id of the Resource to do the invocation on
	 * @param name the name of the operation to invoke
	 * @param parameters the operation parameters
	 * @param returnType the class of the Resource the operation returns
	 * @param headers headers for this request, typically key-value pairs of HTTP headers
	 * @return the results of the operation
	 */
	<R extends IBaseResource, P extends IBaseParameters, I extends IIdType> R invoke(
			I id, String name, P parameters, Class<R> returnType, Map<String, String> headers);

	/**
	 * Invokes an instance-level operation on this repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/operations.html">FHIR operations</a>
	 *
	 * @param <P> a Parameters type for operation parameters
	 * @param <I> an Id type
	 * @param id the id of the Resource to do the invocation on
	 * @param name the name of the operation to invoke
	 * @param parameters the operation parameters
	 * @return a MethodOutcome with a status code
	 */
	default <P extends IBaseParameters, I extends IIdType> MethodOutcome invoke(I id, String name, P parameters) {
		return this.invoke(id, name, parameters, Collections.emptyMap());
	}

	/**
	 * Invokes an instance-level operation on this repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/operations.html">FHIR operations</a>
	 *
	 * @param <P> a Parameters type for operation parameters
	 * @param <I> an Id type
	 * @param id the id of the Resource to do the invocation on
	 * @param name the name of the operation to invoke
	 * @param parameters the operation parameters
	 * @param headers headers for this request, typically key-value pairs of HTTP headers
	 * @return a MethodOutcome with a status code
	 */
	default <P extends IBaseParameters, I extends IIdType> MethodOutcome invoke(
			I id, String name, P parameters, Map<String, String> headers) {
		return throwNotImplementedOperationException("instance-level invoke is not supported by this repository");
	}

	// History starts here

	/**
	 * Returns a Bundle with server-level history for this repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#history">FHIR history</a>
	 *
	 * @param <B> a Bundle type to return
	 * @param <P> a Parameters type for input parameters
	 * @param parameters the parameters for this history interaction
	 * @param returnType the class of the Bundle type to return
	 * @return a Bundle with the server history
	 */
	default <B extends IBaseBundle, P extends IBaseParameters> B history(P parameters, Class<B> returnType) {
		return this.history(parameters, returnType, Collections.emptyMap());
	}

	/**
	 * Returns a Bundle with server-level history for this repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#history">FHIR history</a>
	 *
	 * @param <B> a Bundle type to return
	 * @param <P> a Parameters type for input parameters
	 * @param parameters the parameters for this history interaction
	 * @param returnType the class of the Bundle type to return
	 * @param headers headers for this request, typically key-value pairs of HTTP headers
	 * @return a Bundle with the server history
	 */
	default <B extends IBaseBundle, P extends IBaseParameters> B history(
			P parameters, Class<B> returnType, Map<String, String> headers) {
		return throwNotImplementedOperationException("server-level history is not supported by this repository");
	}

	/**
	 * Returns a Bundle with type-level history for this repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#history">FHIR history</a>
	 *
	 * @param <B> a Bundle type to return
	 * @param <P> a Parameters type for input parameters
	 * @param <T> a Resource type to produce history for
	 * @param resourceType the class of the Resource type to produce history for
	 * @param parameters the parameters for this history interaction
	 * @param returnType the class of the Bundle type to return
	 * @return a Bundle with the type history
	 */
	default <B extends IBaseBundle, P extends IBaseParameters, T extends IBaseResource> B history(
			Class<T> resourceType, P parameters, Class<B> returnType) {
		return this.history(resourceType, parameters, returnType, Collections.emptyMap());
	}

	/**
	 * Returns a Bundle with type-level history for this repository
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#history">FHIR history</a>
	 *
	 * @param <B> a Bundle type to return
	 * @param <P> a Parameters type for input parameters
	 * @param <T> a Resource type to produce history for
	 * @param resourceType the class of the Resource type to produce history for
	 * @param parameters the parameters for this history interaction
	 * @param returnType the class of the Bundle type to return
	 * @param headers headers for this request, typically key-value pairs of HTTP headers
	 * @return a Bundle with the type history
	 */
	default <B extends IBaseBundle, P extends IBaseParameters, T extends IBaseResource> B history(
			Class<T> resourceType, P parameters, Class<B> returnType, Map<String, String> headers) {
		return throwNotImplementedOperationException("type-level history is not supported by this repository");
	}

	/**
	 * Returns a Bundle with instance-level history
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#history">FHIR history</a>
	 *
	 * @param <B> a Bundle type to return
	 * @param <P> a Parameters type for input parameters
	 * @param <I> an Id type for the Resource to produce history for
	 * @param id the id of the Resource type to produce history for
	 * @param parameters the parameters for this history interaction
	 * @param returnType the class of the Bundle type to return
	 * @return a Bundle with the instance history
	 */
	default <B extends IBaseBundle, P extends IBaseParameters, I extends IIdType> B history(
			I id, P parameters, Class<B> returnType) {
		return this.history(id, parameters, returnType, Collections.emptyMap());
	}

	/**
	 * Returns a Bundle with instance-level history
	 *
	 * @see <a href="https://www.hl7.org/fhir/http.html#history">FHIR history</a>
	 *
	 * @param <B> a Bundle type to return
	 * @param <P> a Parameters type for input parameters
	 * @param <I> an Id type for the Resource to produce history for
	 * @param id the id of the Resource type to produce history for
	 * @param parameters the parameters for this history interaction
	 * @param returnType the class of the Bundle type to return
	 * @param headers headers for this request, typically key-value pairs of HTTP headers
	 * @return a Bundle with the instance history
	 */
	default <B extends IBaseBundle, P extends IBaseParameters, I extends IIdType> B history(
			I id, P parameters, Class<B> returnType, Map<String, String> headers) {
		return throwNotImplementedOperationException("instance-level history is not supported by this repository");
	}

	/**
	 * Returns the {@link FhirContext} used by the repository
	 *
	 * Practically, implementing FHIR functionality with the HAPI toolset requires a FhirContext. In
	 * particular for things like version-independent code. Ideally, a user could determine which FHIR version a
	 * repository was configured for using things like the CapabilityStatement. In practice, that's
	 * not widely implemented (yet) and it's expensive to create a new context with every call. We
	 * will probably revisit this in the future.
	 *
	 * @return a FhirContext
	 */
	FhirContext fhirContext();

	private static <T> T throwNotImplementedOperationException(String theMessage) {
		throw new NotImplementedOperationException(Msg.code(2542) + theMessage);
	}
}

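A hedged caller's-eye sketch of the new interface (assuming an R4 model, HAPI's StringParam, and some concrete Repository implementation named repository; none of these come from the commit itself):

    // Read by id
    Patient patient = repository.read(Patient.class, new IdType("Patient/123"));

    // Search with standard FHIR search parameters
    Map<String, List<IQueryParameterType>> params = Map.of("name", List.of(new StringParam("Smith")));
    Bundle results = repository.search(Bundle.class, Patient.class, params);

    // Interactions the implementation does not support surface as NotImplementedOperationException
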
@@ -101,7 +101,7 @@ public class FhirTerser {
 		return newList;
 	}

-	private ExtensionDt createEmptyExtensionDt(IBaseExtension theBaseExtension, String theUrl) {
+	private ExtensionDt createEmptyExtensionDt(IBaseExtension<?, ?> theBaseExtension, String theUrl) {
 		return createEmptyExtensionDt(theBaseExtension, false, theUrl);
 	}

@@ -122,13 +122,13 @@ public class FhirTerser {
 		return theSupportsUndeclaredExtensions.addUndeclaredExtension(theIsModifier, theUrl);
 	}

-	private IBaseExtension createEmptyExtension(IBaseHasExtensions theBaseHasExtensions, String theUrl) {
-		return (IBaseExtension) theBaseHasExtensions.addExtension().setUrl(theUrl);
+	private IBaseExtension<?, ?> createEmptyExtension(IBaseHasExtensions theBaseHasExtensions, String theUrl) {
+		return (IBaseExtension<?, ?>) theBaseHasExtensions.addExtension().setUrl(theUrl);
 	}

-	private IBaseExtension createEmptyModifierExtension(
+	private IBaseExtension<?, ?> createEmptyModifierExtension(
 			IBaseHasModifierExtensions theBaseHasModifierExtensions, String theUrl) {
-		return (IBaseExtension)
+		return (IBaseExtension<?, ?>)
 				theBaseHasModifierExtensions.addModifierExtension().setUrl(theUrl);
 	}

@@ -407,7 +407,7 @@ public class FhirTerser {

 	public String getSinglePrimitiveValueOrNull(IBase theTarget, String thePath) {
 		return getSingleValue(theTarget, thePath, IPrimitiveType.class)
-				.map(t -> t.getValueAsString())
+				.map(IPrimitiveType::getValueAsString)
 				.orElse(null);
 	}

@@ -487,7 +487,7 @@ public class FhirTerser {
 		} else {
 			// DSTU3+
 			final String extensionUrlForLambda = extensionUrl;
-			List<IBaseExtension> extensions = Collections.emptyList();
+			List<IBaseExtension<?, ?>> extensions = Collections.emptyList();
 			if (theCurrentObj instanceof IBaseHasExtensions) {
 				extensions = ((IBaseHasExtensions) theCurrentObj)
 						.getExtension().stream()

@@ -505,7 +505,7 @@ public class FhirTerser {
 			}
 		}

-		for (IBaseExtension next : extensions) {
+		for (IBaseExtension<?, ?> next : extensions) {
 			if (theWantedClass.isAssignableFrom(next.getClass())) {
 				retVal.add((T) next);
 			}

@@ -581,7 +581,7 @@ public class FhirTerser {
 		} else {
 			// DSTU3+
 			final String extensionUrlForLambda = extensionUrl;
-			List<IBaseExtension> extensions = Collections.emptyList();
+			List<IBaseExtension<?, ?>> extensions = Collections.emptyList();

 			if (theCurrentObj instanceof IBaseHasModifierExtensions) {
 				extensions = ((IBaseHasModifierExtensions) theCurrentObj)

@@ -602,7 +602,7 @@ public class FhirTerser {
 			}
 		}

-		for (IBaseExtension next : extensions) {
+		for (IBaseExtension<?, ?> next : extensions) {
 			if (theWantedClass.isAssignableFrom(next.getClass())) {
 				retVal.add((T) next);
 			}

@@ -1203,7 +1203,6 @@ public class FhirTerser {
 	public void visit(IBase theElement, IModelVisitor2 theVisitor) {
 		BaseRuntimeElementDefinition<?> def = myContext.getElementDefinition(theElement.getClass());
 		if (def instanceof BaseRuntimeElementCompositeDefinition) {
-			BaseRuntimeElementCompositeDefinition<?> defComposite = (BaseRuntimeElementCompositeDefinition<?>) def;
 			visit(theElement, null, def, theVisitor, new ArrayList<>(), new ArrayList<>(), new ArrayList<>());
 		} else if (theElement instanceof IBaseExtension) {
 			theVisitor.acceptUndeclaredExtension(

@@ -1562,7 +1561,7 @@ public class FhirTerser {
 			throw new DataFormatException(Msg.code(1796) + "Invalid path " + thePath + ": Element of type "
 					+ def.getName() + " has no child named " + nextPart + ". Valid names: "
 					+ def.getChildrenAndExtension().stream()
-							.map(t -> t.getElementName())
+							.map(BaseRuntimeChildDefinition::getElementName)
 							.sorted()
 							.collect(Collectors.joining(", ")));
 		}

@@ -1817,7 +1816,18 @@ public class FhirTerser {
 			if (getResourceToIdMap() == null) {
 				return null;
 			}
-			return getResourceToIdMap().get(theNext);
+
+			var idFromMap = getResourceToIdMap().get(theNext);
+			if (idFromMap != null) {
+				return idFromMap;
+			} else if (theNext.getIdElement().getIdPart() != null) {
+				return getResourceToIdMap().values().stream()
+						.filter(id -> theNext.getIdElement().getIdPart().equals(id.getIdPart()))
+						.findAny()
+						.orElse(null);
+			} else {
+				return null;
+			}
 		}

 		private List<IBaseResource> getOrCreateResourceList() {

@@ -209,7 +209,7 @@ public class ParametersUtil {
 	 *
 	 * @param theContext The FhirContext
	 * @param theParameters The Parameters resource
-	 * @param theName The parametr name
+	 * @param theName The parameter name
 	 * @param theValue The parameter value (can be a {@link IBaseResource resource} or a {@link IBaseDatatype datatype})
 	 */
 	public static void addParameterToParameters(

@@ -248,7 +248,7 @@ public class ParametersUtil {

 	private static IBase createParameterRepetition(
 			FhirContext theContext,
-			IBaseResource theTargetResource,
+			IBase theTargetResource,
 			BaseRuntimeChildDefinition paramChild,
 			BaseRuntimeElementCompositeDefinition<?> paramChildElem,
 			String theName) {

@@ -458,6 +458,17 @@ public class ParametersUtil {
 		return part;
 	}

+	public static IBase createPart(FhirContext theContext, IBase thePart, String theName) {
+		BaseRuntimeElementCompositeDefinition<?> def =
+				(BaseRuntimeElementCompositeDefinition<?>) theContext.getElementDefinition(thePart.getClass());
+		BaseRuntimeChildDefinition partChild = def.getChildByName("part");
+
+		BaseRuntimeElementCompositeDefinition<?> partChildElem =
+				(BaseRuntimeElementCompositeDefinition<?>) partChild.getChildByName("part");
+
+		return createParameterRepetition(theContext, thePart, partChild, partChildElem, theName);
+	}
+
 	public static void addPartResource(
 			FhirContext theContext, IBase theParameter, String theName, IBaseResource theValue) {
 		BaseRuntimeElementCompositeDefinition<?> def =

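A hedged sketch of the new createPart overload (myFhirContext and existingPart are assumed to exist in the caller; only the createPart call itself comes from this commit):

    // Attach a nested part named "score" under an existing Parameters.parameter part
    IBase scorePart = ParametersUtil.createPart(myFhirContext, existingPart, "score");
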
@@ -65,7 +65,7 @@ public class SubscriptionUtil {
 		populatePrimitiveValue(theContext, theSubscription, "status", theStatus);
 	}

-	public static boolean isCrossPartition(IBaseResource theSubscription) {
+	public static boolean isDefinedAsCrossPartitionSubcription(IBaseResource theSubscription) {
 		if (theSubscription instanceof IBaseHasExtensions) {
 			IBaseExtension extension = ExtensionUtil.getExtensionByUrl(
 					theSubscription, HapiExtensions.EXTENSION_SUBSCRIPTION_CROSS_PARTITION);

@@ -20,10 +20,12 @@ package ca.uhn.fhir.util;
  * #L%
  */

+import com.google.common.collect.Streams;
 import jakarta.annotation.Nonnull;

 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Iterator;
 import java.util.List;
+import java.util.function.Consumer;
 import java.util.stream.Stream;

@@ -57,4 +59,9 @@ public class TaskChunker<T> {
 	public <T> Stream<List<T>> chunk(Stream<T> theStream, int theChunkSize) {
 		return StreamUtil.partition(theStream, theChunkSize);
 	}
+
+	@Nonnull
+	public void chunk(Iterator<T> theIterator, int theChunkSize, Consumer<List<T>> theListConsumer) {
+		chunk(Streams.stream(theIterator), theChunkSize).forEach(theListConsumer);
+	}
 }

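A small usage sketch of the new iterator overload (idIterator and processBatch are placeholders for caller-supplied values, not names from this commit):

    // Consume a potentially large iterator in batches of 500
    new TaskChunker<Long>().chunk(idIterator, 500, batch -> processBatch(batch));
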
@@ -154,9 +154,14 @@ public enum VersionEnum {

 	V7_1_0,
 	V7_2_0,
+	V7_2_1,
+	V7_2_2,

 	V7_3_0,
-	V7_4_0;
+	V7_4_0,
+
+	V7_5_0,
+	V7_6_0;

 	public static VersionEnum latestVersion() {
 		VersionEnum[] values = VersionEnum.values();

@@ -20,29 +20,115 @@
 package org.hl7.fhir.instance.model.api;

 import ca.uhn.fhir.model.api.annotation.SearchParamDefinition;
+import ca.uhn.fhir.rest.gclient.DateClientParam;
 import ca.uhn.fhir.rest.gclient.TokenClientParam;
+import ca.uhn.fhir.rest.gclient.UriClientParam;

 /**
  * An IBaseResource that has a FHIR version of DSTU3 or higher
  */
 public interface IAnyResource extends IBaseResource {

+	String SP_RES_ID = "_id";
 	/**
 	 * Search parameter constant for <b>_id</b>
 	 */
-	@SearchParamDefinition(name = "_id", path = "", description = "The ID of the resource", type = "token")
-	String SP_RES_ID = "_id";
+	@SearchParamDefinition(
+			name = SP_RES_ID,
+			path = "Resource.id",
+			description = "The ID of the resource",
+			type = "token")

 	/**
 	 * <b>Fluent Client</b> search parameter constant for <b>_id</b>
 	 * <p>
 	 * Description: <b>the _id of a resource</b><br>
 	 * Type: <b>string</b><br>
-	 * Path: <b>Resource._id</b><br>
+	 * Path: <b>Resource.id</b><br>
 	 * </p>
 	 */
 	TokenClientParam RES_ID = new TokenClientParam(IAnyResource.SP_RES_ID);

+	String SP_RES_LAST_UPDATED = "_lastUpdated";
+	/**
+	 * Search parameter constant for <b>_lastUpdated</b>
+	 */
+	@SearchParamDefinition(
+			name = SP_RES_LAST_UPDATED,
+			path = "Resource.meta.lastUpdated",
+			description = "Only return resources which were last updated as specified by the given range",
+			type = "date")
+
+	/**
+	 * <b>Fluent Client</b> search parameter constant for <b>_lastUpdated</b>
+	 * <p>
+	 * Description: <b>The last updated date of a resource</b><br>
+	 * Type: <b>date</b><br>
+	 * Path: <b>Resource.meta.lastUpdated</b><br>
+	 * </p>
+	 */
+	DateClientParam RES_LAST_UPDATED = new DateClientParam(IAnyResource.SP_RES_LAST_UPDATED);
+
+	String SP_RES_TAG = "_tag";
+	/**
+	 * Search parameter constant for <b>_tag</b>
+	 */
+	@SearchParamDefinition(
+			name = SP_RES_TAG,
+			path = "Resource.meta.tag",
+			description = "The tag of the resource",
+			type = "token")
+
+	/**
+	 * <b>Fluent Client</b> search parameter constant for <b>_tag</b>
+	 * <p>
+	 * Description: <b>The tag of a resource</b><br>
+	 * Type: <b>token</b><br>
+	 * Path: <b>Resource.meta.tag</b><br>
+	 * </p>
+	 */
+	TokenClientParam RES_TAG = new TokenClientParam(IAnyResource.SP_RES_TAG);
+
+	String SP_RES_PROFILE = "_profile";
+	/**
+	 * Search parameter constant for <b>_profile</b>
+	 */
+	@SearchParamDefinition(
+			name = SP_RES_PROFILE,
+			path = "Resource.meta.profile",
+			description = "The profile of the resource",
+			type = "uri")
+
+	/**
+	 * <b>Fluent Client</b> search parameter constant for <b>_profile</b>
+	 * <p>
+	 * Description: <b>The profile of a resource</b><br>
+	 * Type: <b>uri</b><br>
+	 * Path: <b>Resource.meta.profile</b><br>
+	 * </p>
+	 */
+	UriClientParam RES_PROFILE = new UriClientParam(IAnyResource.SP_RES_PROFILE);
+
+	String SP_RES_SECURITY = "_security";
+	/**
+	 * Search parameter constant for <b>_security</b>
+	 */
+	@SearchParamDefinition(
+			name = SP_RES_SECURITY,
+			path = "Resource.meta.security",
+			description = "The security of the resource",
+			type = "token")
+
+	/**
+	 * <b>Fluent Client</b> search parameter constant for <b>_security</b>
+	 * <p>
+	 * Description: <b>The security of a resource</b><br>
+	 * Type: <b>token</b><br>
+	 * Path: <b>Resource.meta.security</b><br>
+	 * </p>
+	 */
+	TokenClientParam RES_SECURITY = new TokenClientParam(IAnyResource.SP_RES_SECURITY);
+
 	String getId();

 	IIdType getIdElement();

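A sketch of the fluent-client constants in use, assuming an R4 IGenericClient named client (the client setup and the date value are not part of this commit):

    Bundle recentlyUpdated = client.search()
            .forResource(Patient.class)
            .where(IAnyResource.RES_LAST_UPDATED.after().day("2024-01-01"))
            .returnBundle(Bundle.class)
            .execute();
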
@@ -6,6 +6,9 @@ org.hl7.fhir.common.hapi.validation.support.CommonCodeSystemsTerminologyService.
 org.hl7.fhir.common.hapi.validation.support.CommonCodeSystemsTerminologyService.mismatchCodeSystem=Inappropriate CodeSystem URL "{0}" for ValueSet: {1}
 org.hl7.fhir.common.hapi.validation.support.CommonCodeSystemsTerminologyService.codeNotFoundInValueSet=Code "{0}" is not in valueset: {1}

+org.hl7.fhir.common.hapi.validation.support.RemoteTerminologyServiceValidationSupport.unknownCodeInSystem=Unknown code "{0}#{1}". The Remote Terminology server {2} returned {3}
+org.hl7.fhir.common.hapi.validation.support.RemoteTerminologyServiceValidationSupport.unknownCodeInValueSet=Unknown code "{0}#{1}" for ValueSet with URL "{2}". The Remote Terminology server {3} returned {4}
+
 ca.uhn.fhir.jpa.term.TermReadSvcImpl.expansionRefersToUnknownCs=Unknown CodeSystem URI "{0}" referenced from ValueSet
 ca.uhn.fhir.jpa.term.TermReadSvcImpl.valueSetNotYetExpanded=ValueSet "{0}" has not yet been pre-expanded. Performing in-memory expansion without parameters. Current status: {1} | {2}
 ca.uhn.fhir.jpa.term.TermReadSvcImpl.valueSetNotYetExpanded_OffsetNotAllowed=ValueSet expansion can not combine "offset" with "ValueSet.compose.exclude" unless the ValueSet has been pre-expanded. ValueSet "{0}" must be pre-expanded for this operation to work.

@@ -91,6 +94,7 @@ ca.uhn.fhir.jpa.dao.BaseStorageDao.inlineMatchNotSupported=Inline match URLs are
 ca.uhn.fhir.jpa.dao.BaseStorageDao.transactionOperationWithMultipleMatchFailure=Failed to {0} resource with match URL "{1}" because this search matched {2} resources
 ca.uhn.fhir.jpa.dao.BaseStorageDao.deleteByUrlThresholdExceeded=Failed to DELETE resources with match URL "{0}" because the resolved number of resources: {1} exceeds the threshold of {2}
 ca.uhn.fhir.jpa.dao.BaseStorageDao.transactionOperationWithIdNotMatchFailure=Failed to {0} resource with match URL "{1}" because the matching resource does not match the provided ID
+ca.uhn.fhir.jpa.dao.BaseTransactionProcessor.multiplePartitionAccesses=Can not process transaction with {0} entries: Entries require access to multiple/conflicting partitions
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedNoId=Failed to {0} resource in transaction because no ID was provided
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.transactionOperationFailedUnknownId=Failed to {0} resource in transaction because no resource could be found with ID {1}
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.uniqueIndexConflictFailure=Can not create resource of type {0} as it would create a duplicate unique index matching query: {1} (existing index belongs to {2}, new unique index created by {3})

@@ -41,6 +41,50 @@ public class RequestPartitionIdTest {
 		assertFalse(RequestPartitionId.forPartitionIdsAndNames(null, Lists.newArrayList(1, 2), null).isDefaultPartition());
 	}

+	@Test
+	public void testMergeIds() {
+		RequestPartitionId input0 = RequestPartitionId.fromPartitionIds(1, 2, 3);
+		RequestPartitionId input1 = RequestPartitionId.fromPartitionIds(1, 2, 4);
+
+		RequestPartitionId actual = input0.mergeIds(input1);
+		RequestPartitionId expected = RequestPartitionId.fromPartitionIds(1, 2, 3, 4);
+		assertEquals(expected, actual);
+
+	}
+
+	@Test
+	public void testMergeIds_ThisAllPartitions() {
+		RequestPartitionId input0 = RequestPartitionId.allPartitions();
+		RequestPartitionId input1 = RequestPartitionId.fromPartitionIds(1, 2, 4);
+
+		RequestPartitionId actual = input0.mergeIds(input1);
+		RequestPartitionId expected = RequestPartitionId.allPartitions();
+		assertEquals(expected, actual);
+
+	}
+
+	@Test
+	public void testMergeIds_OtherAllPartitions() {
+		RequestPartitionId input0 = RequestPartitionId.fromPartitionIds(1, 2, 3);
+		RequestPartitionId input1 = RequestPartitionId.allPartitions();
+
+		RequestPartitionId actual = input0.mergeIds(input1);
+		RequestPartitionId expected = RequestPartitionId.allPartitions();
+		assertEquals(expected, actual);
+
+	}
+
+	@Test
+	public void testMergeIds_IncludesDefault() {
+		RequestPartitionId input0 = RequestPartitionId.fromPartitionIds(1, 2, 3);
+		RequestPartitionId input1 = RequestPartitionId.defaultPartition();
+
+		RequestPartitionId actual = input0.mergeIds(input1);
+		RequestPartitionId expected = RequestPartitionId.fromPartitionIds(1, 2, 3, null);
+		assertEquals(expected, actual);
+
+	}
+
 	@Test
 	public void testSerDeserSer() throws JsonProcessingException {
 		{

@@ -3,14 +3,21 @@ package ca.uhn.fhir.util;
import jakarta.annotation.Nonnull;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.function.Consumer;
import java.util.stream.IntStream;
import java.util.stream.Stream;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.times;
@@ -43,8 +50,32 @@ public class TaskChunkerTest {

    @Nonnull
    private static List<Integer> newIntRangeList(int startInclusive, int endExclusive) {
-        List<Integer> input = IntStream.range(startInclusive, endExclusive).boxed().toList();
-        return input;
+        return IntStream.range(startInclusive, endExclusive).boxed().toList();
    }

    @ParameterizedTest
    @MethodSource("testIteratorChunkArguments")
    void testIteratorChunk(List<Integer> theListToChunk, List<List<Integer>> theExpectedChunks) {
        // given
        Iterator<Integer> iter = theListToChunk.iterator();
        ArrayList<List<Integer>> result = new ArrayList<>();

        // when
        new TaskChunker<Integer>().chunk(iter, 3, result::add);

        // then
        assertEquals(theExpectedChunks, result);
    }

    public static Stream<Arguments> testIteratorChunkArguments() {
        return Stream.of(
            Arguments.of(Collections.emptyList(), Collections.emptyList()),
            Arguments.of(List.of(1), List.of(List.of(1))),
            Arguments.of(List.of(1,2), List.of(List.of(1,2))),
            Arguments.of(List.of(1,2,3), List.of(List.of(1,2,3))),
            Arguments.of(List.of(1,2,3,4), List.of(List.of(1,2,3), List.of(4))),
            Arguments.of(List.of(1,2,3,4,5,6,7,8,9), List.of(List.of(1,2,3), List.of(4,5,6), List.of(7,8,9)))
        );
    }

}
@@ -4,7 +4,7 @@
  <modelVersion>4.0.0</modelVersion>
  <groupId>ca.uhn.hapi.fhir</groupId>
  <artifactId>hapi-fhir-bom</artifactId>
-  <version>7.3.10-SNAPSHOT</version>
+  <version>7.5.0-SNAPSHOT</version>

  <packaging>pom</packaging>
  <name>HAPI FHIR BOM</name>

@@ -12,7 +12,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.10-SNAPSHOT</version>
+    <version>7.5.0-SNAPSHOT</version>

    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
-    <version>7.3.10-SNAPSHOT</version>
+    <version>7.5.0-SNAPSHOT</version>

    <relativePath>../pom.xml</relativePath>
  </parent>

@@ -4,7 +4,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.10-SNAPSHOT</version>
+    <version>7.5.0-SNAPSHOT</version>

    <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -6,7 +6,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir-cli</artifactId>
-    <version>7.3.10-SNAPSHOT</version>
+    <version>7.5.0-SNAPSHOT</version>

    <relativePath>../pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
-    <version>7.3.10-SNAPSHOT</version>
+    <version>7.5.0-SNAPSHOT</version>

    <relativePath>../pom.xml</relativePath>
  </parent>

@@ -4,7 +4,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.10-SNAPSHOT</version>
+    <version>7.5.0-SNAPSHOT</version>

    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -4,7 +4,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.10-SNAPSHOT</version>
+    <version>7.5.0-SNAPSHOT</version>

    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.10-SNAPSHOT</version>
+    <version>7.5.0-SNAPSHOT</version>

    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
-    <version>7.3.10-SNAPSHOT</version>
+    <version>7.5.0-SNAPSHOT</version>

    <relativePath>../pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.10-SNAPSHOT</version>
+    <version>7.5.0-SNAPSHOT</version>

    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>
@@ -339,7 +339,8 @@
      "lat": 18.5204,
      "lon": 73.8567,
      "added": "2020-06-18"
-    },{
+    },
+    {
      "title": "Grand Rounds, Inc.",
      "description": "Our mission is to raise the standard of healthcare for everyone, everywhere.",
      "link": "https://grandrounds.com",

@@ -437,6 +438,18 @@
      "lat": 18.483402,
      "lon": -69.929611,
      "added": "2022-08-15"
    },
+    {
+      "title": "Eviden BSD R&D Spain (Atos)",
+      "description": "Use of HAPI-FHIR as the centralized data aggregation repository for clinical research studies.",
+      "company": "Eviden BSD R&D Spain (Atos)",
+      "contactName": "Alberto Acebes",
+      "contactEmail": "alberto.acebes@eviden.com",
+      "link": "https://booklet.evidenresearch.eu/",
+      "city": "Madrid",
+      "lat": 40.43486,
+      "lon": -3.63220,
+      "added": "2022-07-19"
+    }
  ]
}
@@ -0,0 +1,5 @@
---
type: add
issue: 5138
title: "A match result map field has been added to the `$mdm-link-history` operation. This new field shows the rules that
  evaluated true during matching and the corresponding initial match result when the link was created."

@@ -0,0 +1,3 @@
---
release-date: "2024-05-30"
codename: "Borealis"

@@ -0,0 +1,3 @@
---
release-date: "2024-07-19"
codename: "Borealis"

@@ -0,0 +1,5 @@
---
type: fix
issue: 4837
title: "In the case where a resource was serialized, deserialized, copied, and reserialized, contained resources
  were duplicated. This has been corrected."

@@ -0,0 +1,6 @@
---
type: fix
issue: 5960
backport: 7.2.1
title: "Previously, queries with chained sorts would fail to sort correctly with Lucene and full-text searches enabled.
  This has been fixed."

@@ -1,6 +1,7 @@
---
type: fix
issue: 6024
backport: 7.2.2
title: "Fixed a bug in search where requesting a count with HSearch indexing
  and FilterParameter enabled and using the _filter parameter would result
  in inaccurate results being returned."

@@ -0,0 +1,5 @@
---
type: add
issue: 6038
title: "Allow overriding RestfulServer's contextPath determination by overriding IServerAddressStrategy.
  Thanks to Alex Kopp (@alexrkopp) for the contribution!"

@@ -1,6 +1,7 @@
---
type: fix
issue: 6044
backport: 7.2.2
title: "Fixed an issue where doing a cache refresh with advanced Hibernate Search
  enabled would result in an infinite loop of cache refresh -> search for
  StructureDefinition -> cache refresh, etc."

@@ -1,4 +1,5 @@
---
type: fix
issue: 6046
backport: 7.2.2
title: "Previously, using `_text` and `_content` searches in Hibernate Search in R5 was not supported. This issue has been fixed."

@@ -1,5 +1,6 @@
---
type: add
issue: 6046
backport: 7.2.2
title: "Added support for the `:contains` parameter qualifier on the `_text` and `_content` Search Parameters. When using Hibernate Search, this will cause
  the search to perform a substring match on the provided value. Documentation can be found [here](/hapi-fhir/docs/server_jpa/elastic.html#performing-fulltext-search-in-luceneelasticsearch)."
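As a usage note for the entry above: a minimal client-side sketch of the new qualifier using HAPI's generic client. The base URL and search value are placeholders, and the target server is assumed to have Lucene/Elasticsearch indexing enabled.

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Bundle;

public class TextContainsSearchExample {
    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forR4();
        // Placeholder address; point this at a server with fulltext search enabled
        IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

        // Equivalent to: GET [base]/Observation?_text:contains=penicillin
        // With :contains, the narrative is matched on a substring rather than a whole word
        Bundle results = client.search()
                .byUrl("Observation?_text:contains=penicillin")
                .returnBundle(Bundle.class)
                .execute();
        System.out.println("Matches in this page: " + results.getEntry().size());
    }
}
```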
@@ -0,0 +1,7 @@
---
type: fix
issue: 6074
title: "Before being processed, subscriptions would be read out of the database all
  at once. This led to massive memory consumption if there were a lot of them.
  This has now been changed to use batching as a means of mitigating this problem.
  "

@@ -0,0 +1,7 @@
---
type: fix
issue: 6083
title: "Addresses a care-gaps bug in handling missing improvement notation validation and group-level measure scoring definitions.
  Also fixes evaluate-measure subject-list, where a contained List resource was setting invalid references, and bumps to the latest
  version of clinical reasoning, 3.8.
  "

@@ -1,4 +0,0 @@
---
type: fix
issue: 6090
title: "A regression caused partition resolution to fail when creating non-partitionable resources. The issue is fixed."

@@ -0,0 +1,6 @@
---
type: fix
issue: 6094
title: "Fixed a regression in searches for canonical uris using a version
  (e.g. http://example.com|1.2.3).
  "

@@ -0,0 +1,4 @@
---
type: fix
issue: 6097
title: "Fixed a bug where booting the JPA Server Starter would get a duplicate bean error. This has been corrected. Thanks to [@subigre](https://github.com/subigre) for the fix!"

@@ -0,0 +1,4 @@
---
type: perf
issue: 6099
title: "Database migrations that add or drop an index no longer lock tables when running on Azure Sql Server."

@@ -0,0 +1,6 @@
---
type: fix
issue: 6111
title: "Previously, the package installer wouldn't create a composite SearchParameter resource if the SearchParameter
  resource didn't have an expression element at the root level. This has now been fixed by making
  SearchParameter validation in the package installer consistent with the DAO-level validations."

@@ -0,0 +1,7 @@
---
type: fix
issue: 6094
jira: SMILE-8693
title: "Searching or conditional creating/updating with a timestamp with an offset containing '+' would fail with HAPI-1883.
  For example: 'Observation?date=2024-07-08T20:47:12.123+03:30'
  This has been fixed."

@@ -0,0 +1,5 @@
---
type: fix
issue: 6122
title: "Previously, executing the '$validate' operation on a resource instance could result in an HTTP 400 Bad Request
  instead of an HTTP 200 OK response with a list of validation issues. This has been fixed."

@@ -0,0 +1,6 @@
---
type: fix
issue: 6123
title: "The `IAnyResource` `_id` search parameter was missing the `path` property value, which resulted in the extractor
  not working when standard search parameters were instantiated from the defined context. This has been fixed, and
  `_lastUpdated`, `_tag`, `_profile`, and `_security` parameter definitions were also added to the class."

@@ -0,0 +1,7 @@
---
type: fix
issue: 6124
title: "Previously, when retrieving a resource which may contain other resources, such as a document Bundle,
  if a ConsentService's willSeeResource returned AUTHORIZED or REJECT on this parent resource, willSeeResource was
  still being called for the child resources. This has now been fixed so that if a consent service
  returns AUTHORIZED or REJECT for a parent resource, willSeeResource is not called for the child resources."
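To make the fixed behavior concrete, a hypothetical consent service sketch (the authorization rule here is invented for illustration; the method signature follows HAPI's `IConsentService`). With the fix, returning AUTHORIZED or REJECT for the parent document Bundle means `willSeeResource` is no longer invoked for each resource inside it:

```java
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.consent.ConsentOutcome;
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices;
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentService;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.Bundle;

public class DocumentConsentService implements IConsentService {
    @Override
    public ConsentOutcome willSeeResource(
            RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) {
        // Hypothetical rule: authorize document Bundles outright. Per the fix above,
        // AUTHORIZED (or REJECT) on the parent means this method is not re-invoked
        // for each resource contained in the Bundle.
        if (theResource instanceof Bundle && ((Bundle) theResource).getType() == Bundle.BundleType.DOCUMENT) {
            return ConsentOutcome.AUTHORIZED;
        }
        return ConsentOutcome.PROCEED;
    }
}
```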
@@ -0,0 +1,6 @@
---
type: fix
issue: 6083
backport: 7.2.2
title: "A bug with the $everything operation was discovered when trying to search using Hibernate Search. This change makes
  all $everything operations rely on database search until Hibernate Search fully supports the operation."

@@ -0,0 +1,6 @@
---
type: fix
issue: 6134
backport: 7.2.2
title: "Fixed a regression in 7.2.0 which caused systems using `FILESYSTEM` binary storage mode to be unable to read metadata documents
  that had been previously stored on disk."

@@ -0,0 +1,7 @@
---
type: change
issue: 6140
title: "A prototype interface to abstract data access across different types
  of FHIR repositories (e.g. remote REST, local JPA) has been added to the `hapi-fhir-base` project.
  Implementations of this interface will follow in future HAPI releases, and it will continue to evolve
  as it's validated through implementation."

@@ -0,0 +1,6 @@
---
type: fix
issue: 6142
jira: SMILE-8701
title: "Previously, if you upgraded from any older HAPI version to 6.6.0 or later, the `SEARCH_UUID` column length still
  showed as 36 despite it being updated to have a length of 48. This has now been fixed."

@@ -0,0 +1,10 @@
---
type: fix
issue: 6146
jira: SMILE-8191
title: "Previously, on MSSQL, two resources with IDs that are identical except for case
  (ex: Patient1 vs. patient1) would be considered to have the same ID because the database collation is
  case insensitive (SQL_Latin1_General_CP1_CI_AS). Among other things, this would manifest
  itself when trying to delete and re-create one of the resources.
  This has been fixed with a migration step that makes the collation on the resource ID case sensitive
  (SQL_Latin1_General_CP1_CS_AS)."

@@ -0,0 +1,5 @@
---
type: add
issue: 6148
jira: SMILE-8613
title: "Added the target resource partitionId and partitionDate to the resourceLink table."

@@ -0,0 +1,6 @@
---
type: fix
issue: 6150
title: "Previously, the resource $validate operation would return a 404 when the associated profile uses a ValueSet
  that has multiple includes referencing Remote Terminology CodeSystem resources.
  This has been fixed to return a 200 with issues instead."

@@ -0,0 +1,11 @@
---
type: fix
issue: 6153
title: "Previously, if you created a resource with some conditional url,
  but then submitted a transaction bundle that
  a) updated the resource to not match the condition anymore and
  b) created a resource with the (same) condition,
  a unique index violation would result.

  This has been fixed.
  "

@@ -0,0 +1,6 @@
---
type: fix
issue: 6156
title: "The migration that creates index IDX_IDXCMBTOKNU_HASHC on table HFJ_IDX_CMB_TOK_NU
  is now marked as online (concurrent).
  "

@@ -0,0 +1,8 @@
---
type: fix
issue: 6159
jira: SMILE-8604
title: "Previously, the `$apply-codesystem-delta-add` and `$apply-codesystem-delta-remove` operations were failing
  with a 500 Server Error when invoked with a CodeSystem Resource payload that had a concept without a
  `display` element. This has now been fixed so that concepts without a display element are accepted, as `display`
  is not required."

@@ -0,0 +1,7 @@
---
type: fix
jira: SMILE-8652
title: "When JPA servers are configured to always require a new database
  transaction when switching partitions, the server will now correctly
  identify the correct partition for FHIR transaction operations, and
  fail the operation if multiple partitions would be required."

@@ -0,0 +1,7 @@

---
type: change
issue: 6179
title: "The $reindex operation could previously initiate a reindex job without any urls provided in the parameters.
  We now internally generate a list of urls out of all the supported resource types and attempt to reindex
  found resources of each type separately. As a result, each reindex (batch2) job chunk will always be associated with a url."
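For illustration, a hedged sketch of invoking `$reindex` with explicit `url` parameters (the server address and resource types are placeholders, and the parameter name follows the changelog text above); when no urls are supplied, the server now generates an equivalent per-resource-type list itself:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;

public class ReindexExample {
    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forR4();
        IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir"); // placeholder

        // One url per resource type; per the change above, each resulting
        // batch2 job chunk is associated with one of these urls.
        Parameters inParams = new Parameters();
        inParams.addParameter("url", new StringType("Patient?"));
        inParams.addParameter("url", new StringType("Observation?"));

        Parameters outParams = client.operation()
                .onServer()
                .named("$reindex")
                .withParameters(inParams)
                .execute();
        System.out.println(ctx.newJsonParser().encodeResourceToString(outParams));
    }
}
```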
@@ -0,0 +1,6 @@
---
type: fix
issue: 6179
title: "Previously, the $reindex operation would fail when using a custom partitioning interceptor which decides the partition
  based on the resource type in the request. This has been fixed: instead of retrieving the resource type from
  the request, we use the urls provided as parameters to the operation to determine the partitions."

@@ -0,0 +1,6 @@
---
type: fix
issue: 6188
jira: SMILE-8759
title: "Previously, a Subscription not marked as a cross-partition subscription could listen to incoming resources from
  other partitions. This issue is fixed."

@@ -0,0 +1,4 @@
---
type: fix
issue: 6208
title: "A regression was temporarily introduced which caused searches by `_lastUpdated` to fail with a NullPointerException when using Lucene as the backing search engine. This has been corrected."

@@ -0,0 +1,4 @@
---
type: add
issue: 6182
title: "A new Pointcut called `MDM_SUBMIT_PRE_MESSAGE_DELIVERY` has been added. If you wish to customize the `ResourceModifiedJsonMessage` sent to the broker, you can do so by implementing this Pointcut, and returning `ResourceModifiedJsonMessage`."
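A minimal interceptor sketch for the new pointcut, based only on the description above. The hook receives the `ResourceModifiedJsonMessage` about to be delivered; this sketch merely logs it, though per the entry the message may also be modified before delivery:

```java
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedJsonMessage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Interceptor
public class MdmSubmitMessageAuditor {
    private static final Logger ourLog = LoggerFactory.getLogger(MdmSubmitMessageAuditor.class);

    @Hook(Pointcut.MDM_SUBMIT_PRE_MESSAGE_DELIVERY)
    public void beforeMdmSubmitDelivery(ResourceModifiedJsonMessage theMessage) {
        // Invoked immediately before the message is handed to the broker; the
        // message (payload, headers, key) may be inspected or adjusted here.
        ourLog.info("About to deliver MDM submit message: {}", theMessage);
    }
}
```

The interceptor would then be registered with the server's interceptor registry in the usual way.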
@@ -0,0 +1,8 @@
---
type: fix
issue: 6216
jira: SMILE-8806
title: "Previously, searches combining the `_text` query parameter (using Lucene/Elasticsearch) with query parameters
  using the database (e.g. `identifier` or `date`) could miss matches when more than 500 results match the `_text` query
  parameter. This has been fixed, but may be slow if many results match the `_text` query and must be checked against the
  database parameters."

@@ -0,0 +1,10 @@
---
- item:
    type: "add"
    title: "The versions of a few dependencies have been bumped to more recent versions
      (dependent HAPI modules listed in brackets):
      <ul>
      <li>Bower/Moment.js (hapi-fhir-testpage-overlay): 2.27.0 -> 2.29.4</li>
      <li>htmlunit (Base): 3.9.0 -> 3.11.0</li>
      <li>Elasticsearch (Base): 8.11.1 -> 8.14.3</li>
      </ul>"

@@ -0,0 +1,3 @@
---
release-date: "2024-11-15"
codename: "TBD"
@@ -31,6 +31,12 @@ Note that the Oracle JDBC drivers are not distributed in the Maven Central repos
java -cp hapi-fhir-cli.jar ca.uhn.fhir.cli.App migrate-database -d ORACLE_12C -u "[url]" -n "[username]" -p "[password]"
```

# Oracle and Sql Server Locking Note

Some versions of Oracle and Sql Server (e.g. Oracle Standard or Sql Server Standard) do NOT support adding or removing an index without locking the underlying table.
If you run migrations while these systems are serving traffic, they will experience unavoidable long pauses in activity during these changes.

## Migrating 3.4.0 to 3.5.0+

As of HAPI FHIR 3.5.0 a new mechanism for creating the JPA index tables (HFJ_SPIDX_xxx) has been implemented. This new mechanism uses hashes in place of large multi-column indexes. This improves both lookup times as well as required storage space. This change also paves the way for the future ability to provide efficient multi-tenant searches (which is not yet implemented but is planned as an incremental improvement).
@@ -11,7 +11,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.10-SNAPSHOT</version>
+    <version>7.5.0-SNAPSHOT</version>

    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -4,7 +4,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.10-SNAPSHOT</version>
+    <version>7.5.0-SNAPSHOT</version>

    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.10-SNAPSHOT</version>
+    <version>7.5.0-SNAPSHOT</version>

    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.3.10-SNAPSHOT</version>
+    <version>7.5.0-SNAPSHOT</version>

    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>
@@ -17,7 +17,7 @@
 * limitations under the License.
 * #L%
 */
-package ca.uhn.fhir.jpa.reindex;
+package ca.uhn.fhir.jpa.batch2;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;

@@ -41,8 +41,10 @@ import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.DateRangeUtil;
import ca.uhn.fhir.util.Logs;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;

import java.util.Date;

@@ -50,7 +52,7 @@ import java.util.function.Supplier;
import java.util.stream.Stream;

public class Batch2DaoSvcImpl implements IBatch2DaoSvc {
-    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(Batch2DaoSvcImpl.class);
+    private static final org.slf4j.Logger ourLog = Logs.getBatchTroubleshootingLog();

    private final IResourceTableDao myResourceTableDao;

@@ -83,7 +85,7 @@ public class Batch2DaoSvcImpl implements IBatch2DaoSvc {
    @Override
    public IResourcePidStream fetchResourceIdStream(
            Date theStart, Date theEnd, RequestPartitionId theRequestPartitionId, String theUrl) {
-        if (theUrl == null) {
+        if (StringUtils.isBlank(theUrl)) {
            return makeStreamResult(
                    theRequestPartitionId, () -> streamResourceIdsNoUrl(theStart, theEnd, theRequestPartitionId));
        } else {

@@ -127,6 +129,10 @@ public class Batch2DaoSvcImpl implements IBatch2DaoSvc {
        return new TypedResourceStream(theRequestPartitionId, streamTemplate);
    }

    /**
     * At the moment there is no use-case for this method.
     * This can be cleaned up at a later point in time if there is no use for it.
     */
    @Nonnull
    private Stream<TypedResourcePid> streamResourceIdsNoUrl(
            Date theStart, Date theEnd, RequestPartitionId theRequestPartitionId) {

@@ -19,7 +19,6 @@
 */
package ca.uhn.fhir.jpa.batch2;

-import ca.uhn.fhir.batch2.api.IJobPartitionProvider;
import ca.uhn.fhir.batch2.api.IJobPersistence;
import ca.uhn.fhir.batch2.config.BaseBatch2Config;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;

@@ -28,8 +27,6 @@ import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkMetadataViewRepository;
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
-import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
-import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import jakarta.persistence.EntityManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@@ -55,10 +52,4 @@ public class JpaBatch2Config extends BaseBatch2Config {
                theEntityManager,
                theInterceptorBroadcaster);
    }
-
-    @Bean
-    public IJobPartitionProvider jobPartitionProvider(
-            IRequestPartitionHelperSvc theRequestPartitionHelperSvc, IPartitionLookupSvc thePartitionLookupSvc) {
-        return new JpaJobPartitionProvider(theRequestPartitionHelperSvc, thePartitionLookupSvc);
-    }
}
@@ -19,45 +19,47 @@
 */
package ca.uhn.fhir.jpa.batch2;

import ca.uhn.fhir.batch2.api.IJobPartitionProvider;
+import ca.uhn.fhir.batch2.coordinator.DefaultJobPartitionProvider;
+import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrl;
+import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
-import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.jpa.searchparam.MatchUrlService;

import java.util.List;
import java.util.stream.Collectors;

/**
 * The default JPA implementation, which uses {@link IRequestPartitionHelperSvc} and {@link IPartitionLookupSvc}
- * to compute the partition to run a batch2 job.
+ * to compute the {@link PartitionedUrl} list to run a batch2 job.
 * The latter will be used to handle cases when the job is configured to run against all partitions
 * (bulk system operation) and will return the actual list with all the configured partitions.
 */
-public class JpaJobPartitionProvider implements IJobPartitionProvider {
-    protected final IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
+@Deprecated
+public class JpaJobPartitionProvider extends DefaultJobPartitionProvider {
    private final IPartitionLookupSvc myPartitionLookupSvc;

    public JpaJobPartitionProvider(
            IRequestPartitionHelperSvc theRequestPartitionHelperSvc, IPartitionLookupSvc thePartitionLookupSvc) {
-        myRequestPartitionHelperSvc = theRequestPartitionHelperSvc;
+        super(theRequestPartitionHelperSvc);
        myPartitionLookupSvc = thePartitionLookupSvc;
    }

+    public JpaJobPartitionProvider(
+            FhirContext theFhirContext,
+            IRequestPartitionHelperSvc theRequestPartitionHelperSvc,
+            MatchUrlService theMatchUrlService,
+            IPartitionLookupSvc thePartitionLookupSvc) {
+        super(theFhirContext, theRequestPartitionHelperSvc, theMatchUrlService);
+        myPartitionLookupSvc = thePartitionLookupSvc;
+    }

    @Override
-    public List<RequestPartitionId> getPartitions(RequestDetails theRequestDetails, String theOperation) {
-        RequestPartitionId partitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequestForServerOperation(
-                theRequestDetails, theOperation);
-        if (!partitionId.isAllPartitions()) {
-            return List.of(partitionId);
-        }
-        // handle (bulk) system operations that are typically configured with RequestPartitionId.allPartitions()
-        // populate the actual list of all partitions
-        List<RequestPartitionId> partitionIdList = myPartitionLookupSvc.listPartitions().stream()
+    public List<RequestPartitionId> getAllPartitions() {
+        return myPartitionLookupSvc.listPartitions().stream()
                .map(PartitionEntity::toRequestPartitionId)
                .collect(Collectors.toList());
-        partitionIdList.add(RequestPartitionId.defaultPartition());
-        return partitionIdList;
    }
}
@@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
import ca.uhn.fhir.jpa.api.svc.IDeleteExpungeSvc;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
+import ca.uhn.fhir.jpa.batch2.Batch2DaoSvcImpl;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;

@@ -32,7 +33,6 @@ import ca.uhn.fhir.jpa.dao.expunge.ResourceTableFKProvider;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.delete.batch2.DeleteExpungeSqlBuilder;
import ca.uhn.fhir.jpa.delete.batch2.DeleteExpungeSvcImpl;
-import ca.uhn.fhir.jpa.reindex.Batch2DaoSvcImpl;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import jakarta.persistence.EntityManager;
import org.springframework.beans.factory.annotation.Autowired;
@@ -20,7 +20,7 @@
package ca.uhn.fhir.jpa.dao;

import ca.uhn.fhir.batch2.api.IJobCoordinator;
-import ca.uhn.fhir.batch2.jobs.parameters.UrlPartitioner;
+import ca.uhn.fhir.batch2.api.IJobPartitionProvider;
import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx;
import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;

@@ -103,7 +103,6 @@ import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
-import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.ReflectionUtil;

@@ -193,6 +192,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
    @Autowired
    private IRequestPartitionHelperSvc myRequestPartitionHelperService;

+    @Autowired
+    private IJobPartitionProvider myJobPartitionProvider;
+
    @Autowired
    private MatchUrlService myMatchUrlService;

@@ -214,9 +216,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

    private TransactionTemplate myTxTemplate;

-    @Autowired
-    private UrlPartitioner myUrlPartitioner;
-
    @Autowired
    private ResourceSearchUrlSvc myResourceSearchUrlSvc;

@@ -1306,14 +1305,12 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

        ReindexJobParameters params = new ReindexJobParameters();

+        List<String> urls = List.of();
        if (!isCommonSearchParam(theBase)) {
-            addAllResourcesTypesToReindex(theBase, theRequestDetails, params);
+            urls = theBase.stream().map(t -> t + "?").collect(Collectors.toList());
        }

-        RequestPartitionId requestPartition =
-                myRequestPartitionHelperService.determineReadPartitionForRequestForServerOperation(
-                        theRequestDetails, ProviderConstants.OPERATION_REINDEX);
-        params.setRequestPartitionId(requestPartition);
+        myJobPartitionProvider.getPartitionedUrls(theRequestDetails, urls).forEach(params::addPartitionedUrl);

        JobInstanceStartRequest request = new JobInstanceStartRequest();
        request.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX);

@@ -1334,14 +1331,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
        return Boolean.parseBoolean(shouldSkip.toString());
    }

-    private void addAllResourcesTypesToReindex(
-            List<String> theBase, RequestDetails theRequestDetails, ReindexJobParameters params) {
-        theBase.stream()
-                .map(t -> t + "?")
-                .map(url -> myUrlPartitioner.partitionUrl(url, theRequestDetails))
-                .forEach(params::addPartitionedUrl);
-    }
-
    private boolean isCommonSearchParam(List<String> theBase) {
        // If the base contains the special resource "Resource", this is a common SP that applies to all resources
        return theBase.stream().map(String::toLowerCase).anyMatch(BASE_RESOURCE_NAME::equals);

@@ -2457,11 +2446,13 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
            RestOperationTypeEnum theOperationType,
            TransactionDetails theTransactionDetails) {

-        // we stored a resource searchUrl at creation time to prevent resource duplication. Let's remove the entry on
-        // the
-        // first update but guard against unnecessary trips to the database on subsequent ones.
+        /*
+         * We stored a resource searchUrl at creation time to prevent resource duplication.
+         * We'll clear any currently existing urls from the db, otherwise we could hit
+         * duplicate index violations if we try to add another (after this create/update)
+         */
        ResourceTable entity = (ResourceTable) theEntity;
-        if (entity.isSearchUrlPresent() && thePerformIndexing) {
+        if (entity.isSearchUrlPresent()) {
            myResourceSearchUrlSvc.deleteByResId(
                    (Long) theEntity.getPersistentId().getId());
            entity.setSearchUrlPresent(false);
@@ -32,6 +32,7 @@ import ca.uhn.fhir.jpa.dao.search.ExtendedHSearchResourceProjection;
import ca.uhn.fhir.jpa.dao.search.ExtendedHSearchSearchBuilder;
import ca.uhn.fhir.jpa.dao.search.IHSearchSortHelper;
import ca.uhn.fhir.jpa.dao.search.LastNOperation;
+import ca.uhn.fhir.jpa.dao.search.SearchScrollQueryExecutorAdaptor;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams;

@@ -40,6 +41,7 @@ import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.search.autocomplete.ValueSetAutocompleteOptions;
import ca.uhn.fhir.jpa.search.autocomplete.ValueSetAutocompleteSearch;
import ca.uhn.fhir.jpa.search.builder.ISearchQueryExecutor;
+import ca.uhn.fhir.jpa.search.builder.SearchBuilder;
import ca.uhn.fhir.jpa.search.builder.SearchQueryExecutors;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.extractor.ISearchParamExtractor;

@@ -184,6 +186,19 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
        return doSearch(theResourceName, theParams, null, theMaxResultsToFetch, theRequestDetails);
    }

+    @Transactional
+    @Override
+    public ISearchQueryExecutor searchScrolled(
+            String theResourceType, SearchParameterMap theParams, RequestDetails theRequestDetails) {
+        validateHibernateSearchIsEnabled();
+
+        SearchQueryOptionsStep<?, Long, SearchLoadingOptionsStep, ?, ?> searchQueryOptionsStep =
+                getSearchQueryOptionsStep(theResourceType, theParams, null);
+        logQuery(searchQueryOptionsStep, theRequestDetails);
+
+        return new SearchScrollQueryExecutorAdaptor(searchQueryOptionsStep.scroll(SearchBuilder.getMaximumPageSize()));
+    }
+
    // keep this in sync with supportsSomeOf();
    @SuppressWarnings("rawtypes")
    private ISearchQueryExecutor doSearch(

@@ -62,6 +62,17 @@ public interface IFulltextSearchSvc {
            Integer theMaxResultsToFetch,
            RequestDetails theRequestDetails);

+    /**
+     * Query the index for a complete iterator of ALL results. (scrollable search result).
+     *
+     * @param theResourceName e.g. Patient
+     * @param theParams The search query
+     * @param theRequestDetails The request details
+     * @return Iterator of result PIDs
+     */
+    ISearchQueryExecutor searchScrolled(
+            String theResourceName, SearchParameterMap theParams, RequestDetails theRequestDetails);
+
    /**
     * Autocomplete search for NIH $expand contextDirection=existing
     * @param theOptions operation options
@@ -27,7 +27,6 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect;
-import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;

@@ -97,9 +96,6 @@ public class TransactionProcessor extends BaseTransactionProcessor {
    @Autowired
    private IIdHelperService<JpaPid> myIdHelperService;

-    @Autowired
-    private PartitionSettings myPartitionSettings;
-
    @Autowired
    private JpaStorageSettings myStorageSettings;

@@ -150,14 +146,9 @@
            List<IBase> theEntries,
            StopWatch theTransactionStopWatch) {

-        ITransactionProcessorVersionAdapter versionAdapter = getVersionAdapter();
-        RequestPartitionId requestPartitionId = null;
-        if (!myPartitionSettings.isPartitioningEnabled()) {
-            requestPartitionId = RequestPartitionId.allPartitions();
-        } else {
-            // If all entries in the transaction point to the exact same partition, we'll try and do a pre-fetch
-            requestPartitionId = getSinglePartitionForAllEntriesOrNull(theRequest, theEntries, versionAdapter);
-        }
+        ITransactionProcessorVersionAdapter<?, ?> versionAdapter = getVersionAdapter();
+        RequestPartitionId requestPartitionId =
+                super.determineRequestPartitionIdForWriteEntries(theRequest, theEntries);

        if (requestPartitionId != null) {
            preFetch(theTransactionDetails, theEntries, versionAdapter, requestPartitionId);

@@ -472,24 +463,6 @@
        }
    }

-    private RequestPartitionId getSinglePartitionForAllEntriesOrNull(
-            RequestDetails theRequest, List<IBase> theEntries, ITransactionProcessorVersionAdapter versionAdapter) {
-        RequestPartitionId retVal = null;
-        Set<RequestPartitionId> requestPartitionIdsForAllEntries = new HashSet<>();
-        for (IBase nextEntry : theEntries) {
-            IBaseResource resource = versionAdapter.getResource(nextEntry);
-            if (resource != null) {
-                RequestPartitionId requestPartition = myRequestPartitionSvc.determineCreatePartitionForRequest(
-                        theRequest, resource, myFhirContext.getResourceType(resource));
-                requestPartitionIdsForAllEntries.add(requestPartition);
-            }
-        }
-        if (requestPartitionIdsForAllEntries.size() == 1) {
-            retVal = requestPartitionIdsForAllEntries.iterator().next();
-        }
-        return retVal;
-    }
-
    /**
     * Given a token parameter, build the query predicate based on its hash. Uses system and value if both are available, otherwise just value.
     * If neither are available, it returns null.

@@ -570,11 +543,6 @@
        }
    }

-    @VisibleForTesting
-    public void setPartitionSettingsForUnitTest(PartitionSettings thePartitionSettings) {
-        myPartitionSettings = thePartitionSettings;
-    }
-
    @VisibleForTesting
    public void setIdHelperServiceForUnitTest(IIdHelperService theIdHelperService) {
        myIdHelperService = theIdHelperService;
@@ -23,18 +23,19 @@ package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.model.entity.IPersistedResourceModifiedMessage;
import ca.uhn.fhir.jpa.model.entity.PersistedResourceModifiedMessageEntityPK;
import ca.uhn.fhir.jpa.model.entity.ResourceModifiedEntity;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

import java.util.List;

public interface IResourceModifiedDao
        extends JpaRepository<ResourceModifiedEntity, PersistedResourceModifiedMessageEntityPK>,
                IHapiFhirJpaRepository {

    @Query("SELECT r FROM ResourceModifiedEntity r ORDER BY r.myCreatedTime ASC")
-    List<IPersistedResourceModifiedMessage> findAllOrderedByCreatedTime();
+    Page<IPersistedResourceModifiedMessage> findAllOrderedByCreatedTime(Pageable thePage);

    @Modifying
    @Query("delete from ResourceModifiedEntity r where r.myResourceModifiedEntityPK =:pk")
@@ -135,7 +135,8 @@ public interface IResourceTableDao
     * This method returns a Collection where each row is an element in the collection. Each element in the collection
     * is an object array, where the order matters (the array represents columns returned by the query). Be careful if you change this query in any way.
     */
-    @Query("SELECT t.myResourceType, t.myId, t.myDeleted FROM ResourceTable t WHERE t.myId IN (:pid)")
+    @Query(
+            "SELECT t.myResourceType, t.myId, t.myDeleted, t.myPartitionIdValue, t.myPartitionDateValue FROM ResourceTable t WHERE t.myId IN (:pid)")
    Collection<Object[]> findLookupFieldsByResourcePid(@Param("pid") List<Long> thePids);

    /**

@@ -143,7 +144,7 @@
     * is an object array, where the order matters (the array represents columns returned by the query). Be careful if you change this query in any way.
     */
    @Query(
-            "SELECT t.myResourceType, t.myId, t.myDeleted FROM ResourceTable t WHERE t.myId IN (:pid) AND t.myPartitionIdValue IN :partition_id")
+            "SELECT t.myResourceType, t.myId, t.myDeleted, t.myPartitionIdValue, t.myPartitionDateValue FROM ResourceTable t WHERE t.myId IN (:pid) AND t.myPartitionIdValue IN :partition_id")
    Collection<Object[]> findLookupFieldsByResourcePidInPartitionIds(
            @Param("pid") List<Long> thePids, @Param("partition_id") Collection<Integer> thePartitionId);

@@ -152,7 +153,7 @@
     * is an object array, where the order matters (the array represents columns returned by the query). Be careful if you change this query in any way.
     */
    @Query(
-            "SELECT t.myResourceType, t.myId, t.myDeleted FROM ResourceTable t WHERE t.myId IN (:pid) AND (t.myPartitionIdValue IS NULL OR t.myPartitionIdValue IN :partition_id)")
+            "SELECT t.myResourceType, t.myId, t.myDeleted, t.myPartitionIdValue, t.myPartitionDateValue FROM ResourceTable t WHERE t.myId IN (:pid) AND (t.myPartitionIdValue IS NULL OR t.myPartitionIdValue IN :partition_id)")
    Collection<Object[]> findLookupFieldsByResourcePidInPartitionIdsOrNullPartition(
            @Param("pid") List<Long> thePids, @Param("partition_id") Collection<Integer> thePartitionId);

@@ -161,7 +162,7 @@
     * is an object array, where the order matters (the array represents columns returned by the query). Be careful if you change this query in any way.
     */
    @Query(
-            "SELECT t.myResourceType, t.myId, t.myDeleted FROM ResourceTable t WHERE t.myId IN (:pid) AND t.myPartitionIdValue IS NULL")
+            "SELECT t.myResourceType, t.myId, t.myDeleted, t.myPartitionIdValue, t.myPartitionDateValue FROM ResourceTable t WHERE t.myId IN (:pid) AND t.myPartitionIdValue IS NULL")
    Collection<Object[]> findLookupFieldsByResourcePidInPartitionNull(@Param("pid") List<Long> thePids);

    @Query("SELECT t.myVersion FROM ResourceTable t WHERE t.myId = :pid")
@@ -56,9 +56,10 @@ public class IResourceTableDaoImpl implements IForcedIdQueries {
    @Override
    public Collection<Object[]> findAndResolveByForcedIdWithNoType(
            String theResourceType, Collection<String> theForcedIds, boolean theExcludeDeleted) {
-        String query = "SELECT t.myResourceType, t.myId, t.myFhirId, t.myDeleted "
-                + "FROM ResourceTable t "
-                + "WHERE t.myResourceType = :resource_type AND t.myFhirId IN ( :forced_id )";
+        String query =
+                "SELECT t.myResourceType, t.myId, t.myFhirId, t.myDeleted, t.myPartitionIdValue, t.myPartitionDateValue "
+                        + "FROM ResourceTable t "
+                        + "WHERE t.myResourceType = :resource_type AND t.myFhirId IN ( :forced_id )";

        if (theExcludeDeleted) {
            query += " AND t.myDeleted IS NULL";

@@ -82,9 +83,10 @@ public class IResourceTableDaoImpl implements IForcedIdQueries {
            Collection<String> theForcedIds,
            Collection<Integer> thePartitionId,
            boolean theExcludeDeleted) {
-        String query = "SELECT t.myResourceType, t.myId, t.myFhirId, t.myDeleted "
-                + "FROM ResourceTable t "
-                + "WHERE t.myResourceType = :resource_type AND t.myFhirId IN ( :forced_id ) AND t.myPartitionIdValue IN ( :partition_id )";
+        String query =
+                "SELECT t.myResourceType, t.myId, t.myFhirId, t.myDeleted, t.myPartitionIdValue, t.myPartitionDateValue "
+                        + "FROM ResourceTable t "
+                        + "WHERE t.myResourceType = :resource_type AND t.myFhirId IN ( :forced_id ) AND t.myPartitionIdValue IN ( :partition_id )";

        if (theExcludeDeleted) {
            query += " AND t.myDeleted IS NULL";

@@ -106,9 +108,11 @@ public class IResourceTableDaoImpl implements IForcedIdQueries {
    @Override
    public Collection<Object[]> findAndResolveByForcedIdWithNoTypeInPartitionNull(
            String theResourceType, Collection<String> theForcedIds, boolean theExcludeDeleted) {
-        String query = "SELECT t.myResourceType, t.myId, t.myFhirId, t.myDeleted "
-                + "FROM ResourceTable t "
-                + "WHERE t.myResourceType = :resource_type AND t.myFhirId IN ( :forced_id ) AND t.myPartitionIdValue IS NULL";
+        // we fetch myPartitionIdValue and myPartitionDateValue for resultSet processing consistency
+        String query =
+                "SELECT t.myResourceType, t.myId, t.myFhirId, t.myDeleted, t.myPartitionIdValue, t.myPartitionDateValue "
+                        + "FROM ResourceTable t "
+                        + "WHERE t.myResourceType = :resource_type AND t.myFhirId IN ( :forced_id ) AND t.myPartitionIdValue IS NULL";

        if (theExcludeDeleted) {
            query += " AND t.myDeleted IS NULL";

@@ -132,9 +136,10 @@ public class IResourceTableDaoImpl implements IForcedIdQueries {
            Collection<String> theForcedIds,
            List<Integer> thePartitionIdsWithoutDefault,
            boolean theExcludeDeleted) {
-        String query = "SELECT t.myResourceType, t.myId, t.myFhirId, t.myDeleted "
-                + "FROM ResourceTable t "
-                + "WHERE t.myResourceType = :resource_type AND t.myFhirId IN ( :forced_id ) AND (t.myPartitionIdValue IS NULL OR t.myPartitionIdValue IN ( :partition_id ))";
+        String query =
+                "SELECT t.myResourceType, t.myId, t.myFhirId, t.myDeleted, t.myPartitionIdValue, t.myPartitionDateValue "
+                        + "FROM ResourceTable t "
+                        + "WHERE t.myResourceType = :resource_type AND t.myFhirId IN ( :forced_id ) AND (t.myPartitionIdValue IS NULL OR t.myPartitionIdValue IN ( :partition_id ))";

        if (theExcludeDeleted) {
            query += " AND t.myDeleted IS NULL";
@@ -30,6 +30,7 @@ import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.cross.JpaResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
+import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.search.builder.SearchBuilder;
import ca.uhn.fhir.jpa.util.MemoryCacheService;

@@ -59,12 +60,11 @@ import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.IdType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.support.TransactionSynchronizationManager;

+import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;

@@ -100,7 +100,6 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
 */
@Service
public class IdHelperService implements IIdHelperService<JpaPid> {
-    private static final Logger ourLog = LoggerFactory.getLogger(IdHelperService.class);
    public static final Predicate[] EMPTY_PREDICATE_ARRAY = new Predicate[0];
    public static final String RESOURCE_PID = "RESOURCE_PID";

@@ -523,7 +522,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
        if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY) {
            List<Long> pids = theId.stream()
                    .filter(t -> isValidPid(t))
-                    .map(t -> t.getIdPartAsLong())
+                    .map(IIdType::getIdPartAsLong)
                    .collect(Collectors.toList());
            if (!pids.isEmpty()) {
                resolvePids(requestPartitionId, pids, retVal);

@@ -578,8 +577,14 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
            Long resourcePid = (Long) next[1];
            String forcedId = (String) next[2];
            Date deletedAt = (Date) next[3];
+            Integer partitionId = (Integer) next[4];
+            LocalDate partitionDate = (LocalDate) next[5];

-            JpaResourceLookup lookup = new JpaResourceLookup(resourceType, resourcePid, deletedAt);
+            JpaResourceLookup lookup = new JpaResourceLookup(
+                    resourceType,
+                    resourcePid,
+                    deletedAt,
+                    PartitionablePartitionId.with(partitionId, partitionDate));
            retVal.computeIfAbsent(forcedId, id -> new ArrayList<>()).add(lookup);

            if (!myStorageSettings.isDeleteEnabled()) {

@@ -638,7 +643,11 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
            }
        }
        lookup.stream()
-                .map(t -> new JpaResourceLookup((String) t[0], (Long) t[1], (Date) t[2]))
+                .map(t -> new JpaResourceLookup(
+                        (String) t[0],
+                        (Long) t[1],
+                        (Date) t[2],
+                        PartitionablePartitionId.with((Integer) t[3], (LocalDate) t[4])))
                .forEach(t -> {
                    String id = t.getPersistentId().toString();
                    if (!theTargets.containsKey(id)) {

@@ -683,9 +692,8 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
                    MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, nextResourcePid, Optional.empty());
        }
        Map<JpaPid, Optional<String>> convertRetVal = new HashMap<>();
-        retVal.forEach((k, v) -> {
-            convertRetVal.put(JpaPid.fromId(k), v);
-        });
+        retVal.forEach((k, v) -> convertRetVal.put(JpaPid.fromId(k), v));

        return new PersistentIdToForcedIdMap<>(convertRetVal);
    }

@@ -716,7 +724,8 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
        }

        if (!myStorageSettings.isDeleteEnabled()) {
-            JpaResourceLookup lookup = new JpaResourceLookup(theResourceType, theJpaPid.getId(), theDeletedAt);
+            JpaResourceLookup lookup = new JpaResourceLookup(
+                    theResourceType, theJpaPid.getId(), theDeletedAt, theJpaPid.getPartitionablePartitionId());
            String nextKey = theJpaPid.toString();
            myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey, lookup);
        }

@@ -744,8 +753,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
    @Nonnull
    public List<JpaPid> getPidsOrThrowException(
            @Nonnull RequestPartitionId theRequestPartitionId, List<IIdType> theIds) {
-        List<JpaPid> resourcePersistentIds = resolveResourcePersistentIdsWithCache(theRequestPartitionId, theIds);
-        return resourcePersistentIds;
+        return resolveResourcePersistentIdsWithCache(theRequestPartitionId, theIds);
    }

    @Override
@@ -59,7 +59,7 @@ public class ExtendedHSearchSearchBuilder {
    /**
     * These params have complicated semantics, or are best resolved at the JPA layer for now.
     */
-    public static final Set<String> ourUnsafeSearchParmeters = Sets.newHashSet("_id", "_meta");
+    public static final Set<String> ourUnsafeSearchParmeters = Sets.newHashSet("_id", "_meta", "_count");

    /**
     * Determine if ExtendedHibernateSearchBuilder can support this parameter

@@ -67,20 +67,22 @@ public class ExtendedHSearchSearchBuilder {
     * @param theActiveParamsForResourceType active search parameters for the desired resource type
     * @return whether or not this search parameter is supported in hibernate
     */
-    public boolean supportsSearchParameter(String theParamName, ResourceSearchParams theActiveParamsForResourceType) {
+    public boolean illegalForHibernateSearch(String theParamName, ResourceSearchParams theActiveParamsForResourceType) {
        if (theActiveParamsForResourceType == null) {
-            return false;
+            return true;
        }
        if (ourUnsafeSearchParmeters.contains(theParamName)) {
-            return false;
+            return true;
        }
        if (!theActiveParamsForResourceType.containsParamName(theParamName)) {
-            return false;
+            return true;
        }
-        return true;
+        return false;
    }

    /**
     * By default, do not use Hibernate Search.
     * If a Search Parameter is supported by hibernate search,
     * Are any of the queries supported by our indexing?
     * -
     * If not, do not use hibernate, because the results will

@@ -88,13 +90,13 @@ public class ExtendedHSearchSearchBuilder {
     */
    public boolean canUseHibernateSearch(
            String theResourceType, SearchParameterMap myParams, ISearchParamRegistry theSearchParamRegistry) {
-        boolean canUseHibernate = true;
+        boolean canUseHibernate = false;

        ResourceSearchParams resourceActiveSearchParams =
                theSearchParamRegistry.getActiveSearchParams(theResourceType, ISearchParamRegistry.ContextEnum.SEARCH);
        for (String paramName : myParams.keySet()) {
            // is this parameter supported?
-            if (!supportsSearchParameter(paramName, resourceActiveSearchParams)) {
+            if (illegalForHibernateSearch(paramName, resourceActiveSearchParams)) {
                canUseHibernate = false;
            } else {
                // are the parameter values supported?

@@ -220,7 +222,7 @@ public class ExtendedHSearchSearchBuilder {
        ResourceSearchParams activeSearchParams =
                searchParamRegistry.getActiveSearchParams(resourceType, ISearchParamRegistry.ContextEnum.SEARCH);
        for (String nextParam : paramNames) {
-            if (!supportsSearchParameter(nextParam, activeSearchParams)) {
+            if (illegalForHibernateSearch(nextParam, activeSearchParams)) {
                // ignore magic params handled in JPA
                continue;
            }
@@ -46,27 +46,29 @@ import java.util.Date;
 @SuppressWarnings("SqlDialectInspection")
 @Entity
 @Immutable
-@Subselect("SELECT h.pid as pid, "
-		+ " r.res_id as res_id, "
-		+ " h.res_type as res_type, "
-		+ " h.res_version as res_version, "
+// Ideally, all tables and columns should be in UPPERCASE if we ever choose to use a case-sensitive collation for MSSQL
+// and there's a risk that queries on lowercase database objects fail.
+@Subselect("SELECT h.PID as PID, "
+		+ " r.RES_ID as RES_ID, "
+		+ " h.RES_TYPE as RES_TYPE, "
+		+ " h.RES_VERSION as RES_VERSION, "
 		// FHIR version
-		+ " h.res_ver as res_ver, "
+		+ " h.RES_VER as RES_VER, "
 		// resource version
-		+ " h.has_tags as has_tags, "
-		+ " h.res_deleted_at as res_deleted_at, "
-		+ " h.res_published as res_published, "
-		+ " h.res_updated as res_updated, "
-		+ " h.res_text as res_text, "
-		+ " h.res_text_vc as res_text_vc, "
-		+ " h.res_encoding as res_encoding, "
+		+ " h.HAS_TAGS as HAS_TAGS, "
+		+ " h.RES_DELETED_AT as RES_DELETED_AT, "
+		+ " h.RES_PUBLISHED as RES_PUBLISHED, "
+		+ " h.RES_UPDATED as RES_UPDATED, "
+		+ " h.RES_TEXT as RES_TEXT, "
+		+ " h.RES_TEXT_VC as RES_TEXT_VC, "
+		+ " h.RES_ENCODING as RES_ENCODING, "
 		+ " h.PARTITION_ID as PARTITION_ID, "
 		+ " p.SOURCE_URI as PROV_SOURCE_URI,"
 		+ " p.REQUEST_ID as PROV_REQUEST_ID,"
-		+ " r.fhir_id as FHIR_ID "
+		+ " r.FHIR_ID as FHIR_ID "
 		+ "FROM HFJ_RESOURCE r "
-		+ " INNER JOIN HFJ_RES_VER h ON r.res_id = h.res_id and r.res_ver = h.res_ver"
-		+ " LEFT OUTER JOIN HFJ_RES_VER_PROV p ON p.res_ver_pid = h.pid ")
+		+ " INNER JOIN HFJ_RES_VER h ON r.RES_ID = h.RES_ID and r.RES_VER = h.RES_VER"
+		+ " LEFT OUTER JOIN HFJ_RES_VER_PROV p ON p.RES_VER_PID = h.PID ")
 public class ResourceSearchView implements IBaseResourceEntity, Serializable {

 	private static final long serialVersionUID = 1L;
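Why the uppercase rewrite matters: under a case-insensitive MSSQL collation, h.pid and h.PID resolve to the same column, but under a case-sensitive collation only the exact-case identifier resolves. A plain-Java analogy of the two matching modes (illustration only, not HAPI API):

public class CaseSensitiveLookupDemo {
	public static void main(String[] args) {
		String storedIdentifier = "HFJ_RES_VER";   // how the object is named in the catalog
		String queriedIdentifier = "hfj_res_ver";  // lowercase spelling previously used in the @Subselect
		// A case-insensitive collation resolves both spellings; a case-sensitive
		// one resolves only the exact-case spelling, hence the UPPERCASE rewrite.
		System.out.println(storedIdentifier.equalsIgnoreCase(queriedIdentifier)); // true  (CI collation behavior)
		System.out.println(storedIdentifier.equals(queriedIdentifier));           // false (CS collation behavior)
	}
}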
@@ -49,6 +49,7 @@ import ca.uhn.fhir.jpa.model.entity.StorageSettings;
 import ca.uhn.fhir.util.ClasspathUtil;
 import ca.uhn.fhir.util.VersionEnum;
 import org.apache.commons.lang3.StringUtils;
+import org.intellij.lang.annotations.Language;

 import java.util.Arrays;
 import java.util.HashMap;
@@ -413,6 +414,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 		version.onTable("HFJ_IDX_CMB_TOK_NU")
 				.addIndex("20240625.10", "IDX_IDXCMBTOKNU_HASHC")
 				.unique(false)
+				.online(true)
 				.withColumns("HASH_COMPLETE", "RES_ID", "PARTITION_ID");
 		version.onTable("HFJ_IDX_CMP_STRING_UNIQ")
 				.addColumn("20240625.20", "HASH_COMPLETE")
@@ -468,6 +470,75 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 					.failureAllowed();
 		}

+		{
+			// Add target resource partition id/date columns to resource link
+			Builder.BuilderWithTableName resourceLinkTable = version.onTable("HFJ_RES_LINK");
+
+			resourceLinkTable
+					.addColumn("20240718.10", "TARGET_RES_PARTITION_ID")
+					.nullable()
+					.type(ColumnTypeEnum.INT);
+			resourceLinkTable
+					.addColumn("20240718.20", "TARGET_RES_PARTITION_DATE")
+					.nullable()
+					.type(ColumnTypeEnum.DATE_ONLY);
+		}
+
+		{
+			version.onTable(Search.HFJ_SEARCH)
+					.modifyColumn("20240722.1", Search.SEARCH_UUID)
+					.nonNullable()
+					.withType(ColumnTypeEnum.STRING, 48);
+		}
+
+		{
+			final Builder.BuilderWithTableName hfjResource = version.onTable("HFJ_RESOURCE");
+
+			@Language(("SQL"))
+			final String onlyIfSql = "SELECT CASE CHARINDEX('_CI_', COLLATION_NAME) WHEN 0 THEN 0 ELSE 1 END "
+					+ "FROM INFORMATION_SCHEMA.COLUMNS "
+					+ "WHERE TABLE_SCHEMA = SCHEMA_NAME() "
+					+ "AND TABLE_NAME = 'HFJ_RESOURCE' "
+					+ "AND COLUMN_NAME = 'FHIR_ID' ";
+			final String onlyfIReason =
+					"Skipping change to HFJ_RESOURCE.FHIR_ID collation to SQL_Latin1_General_CP1_CS_AS because it is already using it";
+
+			hfjResource
+					.dropIndex("20240724.10", "IDX_RES_FHIR_ID")
+					.onlyAppliesToPlatforms(DriverTypeEnum.MSSQL_2012)
+					.onlyIf(onlyIfSql, onlyfIReason);
+
+			hfjResource
+					.dropIndex("20240724.20", "IDX_RES_TYPE_FHIR_ID")
+					.onlyAppliesToPlatforms(DriverTypeEnum.MSSQL_2012)
+					.onlyIf(onlyIfSql, onlyfIReason);
+
+			version.executeRawSql(
+							"20240724.30",
+							"ALTER TABLE HFJ_RESOURCE ALTER COLUMN FHIR_ID varchar(64) COLLATE SQL_Latin1_General_CP1_CS_AS")
+					.onlyAppliesToPlatforms(DriverTypeEnum.MSSQL_2012)
+					.onlyIf(onlyIfSql, onlyfIReason);
+
+			hfjResource
+					.addIndex("20240724.40", "IDX_RES_FHIR_ID")
+					.unique(false)
+					.online(true)
+					.withColumns("FHIR_ID")
+					.onlyAppliesToPlatforms(DriverTypeEnum.MSSQL_2012)
+					.onlyIf(onlyIfSql, onlyfIReason);
+
+			hfjResource
+					.addIndex("20240724.50", "IDX_RES_TYPE_FHIR_ID")
+					.unique(true)
+					.online(true)
+					// include res_id and our deleted flag so we can satisfy Observation?_sort=_id from the index on
+					// platforms that support it.
+					.includeColumns("RES_ID, RES_DELETED_AT")
+					.withColumns("RES_TYPE", "FHIR_ID")
+					.onlyAppliesToPlatforms(DriverTypeEnum.MSSQL_2012)
+					.onlyIf(onlyIfSql, onlyfIReason);
+		}
 	}

 	protected void init720() {
@@ -971,7 +1042,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 		version.onTable(Search.HFJ_SEARCH)
 				.addColumn("20230215.1", Search.SEARCH_UUID)
 				.nullable()
-				.type(ColumnTypeEnum.STRING, Search.SEARCH_UUID_COLUMN_LENGTH);
+				.type(ColumnTypeEnum.STRING, 48)
+				.doNothing(); // This migration used add instead of modify, so was skipped. See 20240722 for modify.
 		version.onTable(BulkImportJobEntity.HFJ_BLK_IMPORT_JOB)
 				.addColumn("20230215.2", BulkImportJobEntity.JOB_ID)
 				.nullable()
@@ -3091,7 +3163,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 		version.onTable("HFJ_SEARCH")
 				.modifyColumn("20190814.7", "SEARCH_UUID")
 				.nonNullable()
-				.withType(ColumnTypeEnum.STRING, Search.SEARCH_UUID_COLUMN_LENGTH);
+				.withType(ColumnTypeEnum.STRING, 36);

 		version.onTable("HFJ_SEARCH_PARM").dropThisTable("20190814.8");
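The onlyIf guard above evaluates a scalar SQL predicate before each collation migration runs. A sketch of the same decision logic in plain Java (collation names are examples; this is not part of the migration API):

public class CollationPredicateDemo {
	// Mirrors the onlyIf SQL: CHARINDEX('_CI_', COLLATION_NAME) is 0 when the
	// column is already case-sensitive, so the migration is skipped.
	static int onlyIf(String collationName) {
		return collationName.contains("_CI_") ? 1 : 0;
	}

	public static void main(String[] args) {
		System.out.println(onlyIf("SQL_Latin1_General_CP1_CI_AS")); // 1 -> case-insensitive, migration applies
		System.out.println(onlyIf("SQL_Latin1_General_CP1_CS_AS")); // 0 -> already case-sensitive, skipped
	}
}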
@@ -20,18 +20,26 @@
 package ca.uhn.fhir.jpa.model.cross;

 import ca.uhn.fhir.jpa.model.dao.JpaPid;
+import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;

 import java.util.Date;

 public class JpaResourceLookup implements IResourceLookup<JpaPid> {

 	private final String myResourceType;
 	private final Long myResourcePid;
 	private final Date myDeletedAt;
+	private final PartitionablePartitionId myPartitionablePartitionId;

-	public JpaResourceLookup(String theResourceType, Long theResourcePid, Date theDeletedAt) {
+	public JpaResourceLookup(
+			String theResourceType,
+			Long theResourcePid,
+			Date theDeletedAt,
+			PartitionablePartitionId thePartitionablePartitionId) {
 		myResourceType = theResourceType;
 		myResourcePid = theResourcePid;
 		myDeletedAt = theDeletedAt;
+		myPartitionablePartitionId = thePartitionablePartitionId;
 	}

 	@Override
@@ -46,6 +54,9 @@ public class JpaResourceLookup implements IResourceLookup<JpaPid> {

 	@Override
 	public JpaPid getPersistentId() {
-		return JpaPid.fromId(myResourcePid);
+		JpaPid jpaPid = JpaPid.fromId(myResourcePid);
+		jpaPid.setPartitionablePartitionId(myPartitionablePartitionId);
+
+		return jpaPid;
 	}
 }
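A sketch of what the widened constructor buys (argument values are illustrative, and the two-argument PartitionablePartitionId constructor is an assumption based on its use elsewhere in HAPI):

import ca.uhn.fhir.jpa.model.cross.JpaResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;

public class PartitionLookupDemo {
	public static void main(String[] args) {
		// Resource "Patient"/123 resolved from partition 1, not deleted.
		JpaResourceLookup lookup = new JpaResourceLookup(
				"Patient", 123L, null, new PartitionablePartitionId(1, null));
		// The returned JpaPid now carries the partition id captured at lookup
		// time, via setPartitionablePartitionId(), instead of dropping it.
		JpaPid pid = lookup.getPersistentId();
		System.out.println(pid);
	}
}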
@@ -34,6 +34,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
 import ca.uhn.fhir.jpa.dao.data.INpmPackageVersionDao;
 import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
+import ca.uhn.fhir.jpa.dao.validation.SearchParameterDaoValidator;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionEntity;
 import ca.uhn.fhir.jpa.packages.loader.PackageResourceParsingSvc;
@@ -47,8 +48,10 @@ import ca.uhn.fhir.rest.param.StringParam;
 import ca.uhn.fhir.rest.param.TokenParam;
 import ca.uhn.fhir.rest.param.UriParam;
 import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
+import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
 import ca.uhn.fhir.util.FhirTerser;
 import ca.uhn.fhir.util.SearchParameterUtil;
+import ca.uhn.hapi.converters.canonical.VersionCanonicalizer;
 import com.google.common.annotations.VisibleForTesting;
 import jakarta.annotation.PostConstruct;
 import org.apache.commons.lang3.Validate;
@@ -73,7 +76,6 @@ import java.util.Optional;

 import static ca.uhn.fhir.jpa.packages.util.PackageUtils.DEFAULT_INSTALL_TYPES;
-import static ca.uhn.fhir.util.SearchParameterUtil.getBaseAsStrings;
 import static org.apache.commons.lang3.StringUtils.isBlank;

 /**
  * @since 5.1.0
@@ -117,6 +119,12 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
 	@Autowired
 	private JpaStorageSettings myStorageSettings;

+	@Autowired
+	private SearchParameterDaoValidator mySearchParameterDaoValidator;
+
+	@Autowired
+	private VersionCanonicalizer myVersionCanonicalizer;
+
 	/**
 	 * Constructor
 	 */
@@ -431,6 +439,23 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
 		return outcome != null && !outcome.isNop();
 	}

+	/*
+	 * This function helps preserve the resource types in the base of an existing SP when an overriding SP's base
+	 * covers only a subset of the existing base.
+	 *
+	 * For example, say for an existing SP,
+	 *  - the current base is: [ResourceTypeA, ResourceTypeB]
+	 *  - the new base is: [ResourceTypeB]
+	 *
+	 * If we were to overwrite the existing SP's base to the new base ([ResourceTypeB]) then the
+	 * SP would stop working on ResourceTypeA, which would be a loss of functionality.
+	 *
+	 * Instead, this function updates the existing SP's base by removing the resource types that
+	 * are covered by the overriding SP.
+	 * In our example, this function updates the existing SP's base to [ResourceTypeA], so that the existing SP
+	 * still works on ResourceTypeA, and the caller then creates a new SP that covers ResourceTypeB.
+	 * https://github.com/hapifhir/hapi-fhir/issues/5366
+	 */
 	private boolean updateExistingResourceIfNecessary(
 			IFhirResourceDao theDao, IBaseResource theResource, IBaseResource theExistingResource) {
 		if (!"SearchParameter".equals(theResource.getClass().getSimpleName())) {
@@ -506,33 +531,9 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {

 	boolean validForUpload(IBaseResource theResource) {
 		String resourceType = myFhirContext.getResourceType(theResource);
-		if ("SearchParameter".equals(resourceType)) {
-
-			String code = SearchParameterUtil.getCode(myFhirContext, theResource);
-			if (!isBlank(code) && code.startsWith("_")) {
-				ourLog.warn(
-						"Failed to validate resource of type {} with url {} - Error: Resource code starts with \"_\"",
-						theResource.fhirType(),
-						SearchParameterUtil.getURL(myFhirContext, theResource));
-				return false;
-			}
-
-			String expression = SearchParameterUtil.getExpression(myFhirContext, theResource);
-			if (isBlank(expression)) {
-				ourLog.warn(
-						"Failed to validate resource of type {} with url {} - Error: Resource expression is blank",
-						theResource.fhirType(),
-						SearchParameterUtil.getURL(myFhirContext, theResource));
-				return false;
-			}
-
-			if (getBaseAsStrings(myFhirContext, theResource).isEmpty()) {
-				ourLog.warn(
-						"Failed to validate resource of type {} with url {} - Error: Resource base is empty",
-						theResource.fhirType(),
-						SearchParameterUtil.getURL(myFhirContext, theResource));
-				return false;
-			}
+		if ("SearchParameter".equals(resourceType) && !isValidSearchParameter(theResource)) {
+			// this is an invalid search parameter
+			return false;
 		}

 		if (!isValidResourceStatusForPackageUpload(theResource)) {
@@ -546,6 +547,21 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
 		return true;
 	}

+	private boolean isValidSearchParameter(IBaseResource theResource) {
+		try {
+			org.hl7.fhir.r5.model.SearchParameter searchParameter =
+					myVersionCanonicalizer.searchParameterToCanonical(theResource);
+			mySearchParameterDaoValidator.validate(searchParameter);
+			return true;
+		} catch (UnprocessableEntityException unprocessableEntityException) {
+			ourLog.error(
+					"The SearchParameter with URL {} is invalid. Validation Error: {}",
+					SearchParameterUtil.getURL(myFhirContext, theResource),
+					unprocessableEntityException.getMessage());
+			return false;
+		}
+	}
+
 	/**
 	 * For resources like {@link org.hl7.fhir.r4.model.Subscription}, {@link org.hl7.fhir.r4.model.DocumentReference},
 	 * and {@link org.hl7.fhir.r4.model.Communication}, the status field doesn't necessarily need to be set to 'active'
@@ -569,9 +585,13 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
 		List<IPrimitiveType> statusTypes =
 				myFhirContext.newFhirPath().evaluate(theResource, "status", IPrimitiveType.class);
 		// Resource does not have a status field
-		if (statusTypes.isEmpty()) return true;
+		if (statusTypes.isEmpty()) {
+			return true;
+		}
 		// Resource has a null status field
-		if (statusTypes.get(0).getValue() == null) return false;
+		if (statusTypes.get(0).getValue() == null) {
+			return false;
+		}
 		// Resource has a status, and we need to check based on type
 		switch (theResource.fhirType()) {
 			case "Subscription":
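A tiny worked version of the base-preserving update described in the comment above (the resource type names follow the comment's ResourceTypeA/ResourceTypeB example; this is an illustration, not the HAPI implementation):

import java.util.ArrayList;
import java.util.List;

public class BaseSubtractionDemo {
	public static void main(String[] args) {
		List<String> existingBase = new ArrayList<>(List.of("ResourceTypeA", "ResourceTypeB"));
		List<String> overridingBase = List.of("ResourceTypeB");
		// Remove the types the overriding SP now covers; the existing SP keeps the rest.
		existingBase.removeAll(overridingBase);
		System.out.println(existingBase); // [ResourceTypeA]
	}
}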
@@ -475,6 +475,9 @@ public class TerminologyUploaderProvider extends BaseJpaProvider {
 	}

 	private static String csvEscape(String theValue) {
+		if (theValue == null) {
+			return "";
+		}
 		return '"' + theValue.replace("\"", "\"\"").replace("\n", "\\n").replace("\r", "") + '"';
 	}
 }
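The null guard above means a missing value now serializes as an empty CSV cell instead of throwing a NullPointerException. A behavior sketch (the method body is copied from the diff; the wrapper class is mine):

public class CsvEscapeDemo {
	private static String csvEscape(String theValue) {
		if (theValue == null) {
			return "";
		}
		return '"' + theValue.replace("\"", "\"\"").replace("\n", "\\n").replace("\r", "") + '"';
	}

	public static void main(String[] args) {
		System.out.println(csvEscape(null));              // empty cell, no quotes
		System.out.println(csvEscape("say \"hi\"\nbye")); // "say ""hi""\nbye"
	}
}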
@@ -101,7 +101,6 @@ import ca.uhn.fhir.util.StringUtil;
 import ca.uhn.fhir.util.UrlUtil;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Streams;
 import com.healthmarketscience.sqlbuilder.Condition;
 import jakarta.annotation.Nonnull;
 import jakarta.annotation.Nullable;
@@ -141,7 +140,9 @@ import java.util.stream.Collectors;
 import static ca.uhn.fhir.jpa.model.util.JpaConstants.UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE;
 import static ca.uhn.fhir.jpa.search.builder.QueryStack.LOCATION_POSITION;
 import static ca.uhn.fhir.jpa.search.builder.QueryStack.SearchForIdsParams.with;
+import static ca.uhn.fhir.jpa.util.InClauseNormalizer.*;
 import static java.util.Objects.requireNonNull;
+import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
 import static org.apache.commons.lang3.StringUtils.defaultString;
 import static org.apache.commons.lang3.StringUtils.isBlank;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
@@ -205,9 +206,6 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 	@Autowired(required = false)
 	private IElasticsearchSvc myIElasticsearchSvc;

-	@Autowired
-	private FhirContext myCtx;
-
 	@Autowired
 	private IJpaStorageResourceParser myJpaStorageResourceParser;

@@ -332,8 +330,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 		init(theParams, theSearchUuid, theRequestPartitionId);

 		if (checkUseHibernateSearch()) {
-			long count = myFulltextSearchSvc.count(myResourceName, theParams.clone());
-			return count;
+			return myFulltextSearchSvc.count(myResourceName, theParams.clone());
 		}

 		List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), null, null, null, true, theRequest, null);
@@ -404,8 +401,16 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 			fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest);
 			resultCount = fulltextMatchIds.size();
 		} else {
-			fulltextExecutor = myFulltextSearchSvc.searchNotScrolled(
-					myResourceName, myParams, myMaxResultsToFetch, theRequest);
+			// todo performance MB - some queries must intersect with JPA (e.g. they have a chain, or we haven't
+			// enabled SP indexing).
+			// and some queries don't need JPA. We only need the scroll when we need to intersect with JPA.
+			// It would be faster to have a non-scrolled search in this case, since creating the scroll requires
+			// extra work in Elastic.
+			// if (eligibleToSkipJPAQuery) fulltextExecutor = myFulltextSearchSvc.searchNotScrolled( ...
+
+			// we might need to intersect with JPA. So we might need to traverse ALL results from lucene, not just
+			// a page.
+			fulltextExecutor = myFulltextSearchSvc.searchScrolled(myResourceName, myParams, theRequest);
 		}

 		if (fulltextExecutor == null) {
@@ -457,7 +462,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 		// We break the pids into chunks that fit in the 1k limit for jdbc bind params.
 		new QueryChunker<Long>()
 				.chunk(
-						Streams.stream(fulltextExecutor).collect(Collectors.toList()),
+						fulltextExecutor,
+						SearchBuilder.getMaximumPageSize(),
 						t -> doCreateChunkedQueries(
 								theParams, t, theOffset, sort, theCountOnlyFlag, theRequest, queries));
 	}
@@ -560,8 +566,9 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 			boolean theCount,
 			RequestDetails theRequest,
 			ArrayList<ISearchQueryExecutor> theQueries) {
+
 		if (thePids.size() < getMaximumPageSize()) {
-			normalizeIdListForLastNInClause(thePids);
+			thePids = normalizeIdListForInClause(thePids);
 		}
 		createChunkedQuery(theParams, sort, theOffset, thePids.size(), theCount, theRequest, thePids, theQueries);
 	}
@@ -885,41 +892,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 				&& theParams.values().stream()
 						.flatMap(Collection::stream)
 						.flatMap(Collection::stream)
-						.anyMatch(t -> t instanceof ReferenceParam);
-	}
-
-	private List<Long> normalizeIdListForLastNInClause(List<Long> lastnResourceIds) {
-		/*
-		The following is a workaround to a known issue involving Hibernate. If queries are used with "in" clauses with large and varying
-		numbers of parameters, this can overwhelm Hibernate's QueryPlanCache and deplete heap space. See the following link for more info:
-		https://stackoverflow.com/questions/31557076/spring-hibernate-query-plan-cache-memory-usage.
-
-		Normalizing the number of parameters in the "in" clause stabilizes the size of the QueryPlanCache, so long as the number of
-		arguments never exceeds the maximum specified below.
-		*/
-		int listSize = lastnResourceIds.size();
-
-		if (listSize > 1 && listSize < 10) {
-			padIdListWithPlaceholders(lastnResourceIds, 10);
-		} else if (listSize > 10 && listSize < 50) {
-			padIdListWithPlaceholders(lastnResourceIds, 50);
-		} else if (listSize > 50 && listSize < 100) {
-			padIdListWithPlaceholders(lastnResourceIds, 100);
-		} else if (listSize > 100 && listSize < 200) {
-			padIdListWithPlaceholders(lastnResourceIds, 200);
-		} else if (listSize > 200 && listSize < 500) {
-			padIdListWithPlaceholders(lastnResourceIds, 500);
-		} else if (listSize > 500 && listSize < 800) {
-			padIdListWithPlaceholders(lastnResourceIds, 800);
-		}
-
-		return lastnResourceIds;
-	}
-
-	private void padIdListWithPlaceholders(List<Long> theIdList, int preferredListSize) {
-		while (theIdList.size() < preferredListSize) {
-			theIdList.add(-1L);
-		}
+						.anyMatch(ReferenceParam.class::isInstance);
 	}

 	private void createSort(QueryStack theQueryStack, SortSpec theSort, SearchParameterMap theParams) {
@@ -1155,7 +1128,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {

 		List<Long> versionlessPids = JpaPid.toLongList(thePids);
 		if (versionlessPids.size() < getMaximumPageSize()) {
-			versionlessPids = normalizeIdListForLastNInClause(versionlessPids);
+			versionlessPids = normalizeIdListForInClause(versionlessPids);
 		}

 		// -- get the resource from the searchView
@@ -1244,7 +1217,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 		Map<Long, Collection<ResourceTag>> tagMap = new HashMap<>();

 		// -- no tags
-		if (thePidList.size() == 0) return tagMap;
+		if (thePidList.isEmpty()) return tagMap;

 		// -- get all tags for the idList
 		Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(thePidList);
@@ -1384,7 +1357,6 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 		EntityManager entityManager = theParameters.getEntityManager();
 		Integer maxCount = theParameters.getMaxCount();
-		FhirContext fhirContext = theParameters.getFhirContext();
 		DateRangeParam lastUpdated = theParameters.getLastUpdated();
 		RequestDetails request = theParameters.getRequestDetails();
 		String searchIdOrDescription = theParameters.getSearchIdOrDescription();
 		List<String> desiredResourceTypes = theParameters.getDesiredResourceTypes();
@@ -1927,11 +1899,10 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 		}
 		assert !targetResourceTypes.isEmpty();

-		Set<Long> identityHashesForTypes = targetResourceTypes.stream()
+		return targetResourceTypes.stream()
 				.map(type -> BaseResourceIndexedSearchParam.calculateHashIdentity(
 						myPartitionSettings, myRequestPartitionId, type, "url"))
 				.collect(Collectors.toSet());
-
-		return identityHashesForTypes;
 	}

 	private <T> List<Collection<T>> partition(Collection<T> theNextRoundMatches, int theMaxLoad) {
@@ -2040,8 +2011,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 			IQueryParameterType nextOr = nextPermutation.get(paramIndex);
 			String nextOrValue = nextOr.getValueAsQueryToken(myContext);

-			RuntimeSearchParam nextParamDef =
-					mySearchParamRegistry.getActiveSearchParam(myResourceName, nextParamName, ISearchParamRegistry.ContextEnum.SEARCH);
+			RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(
+					myResourceName, nextParamName, ISearchParamRegistry.ContextEnum.SEARCH);
 			if (theComboParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE) {
 				if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.STRING) {
 					nextOrValue = StringUtil.normalizeStringForSearchIndexing(nextOrValue);
@@ -2512,7 +2483,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {

 	private void retrieveNextIteratorQuery() {
 		close();
-		if (myQueryList != null && !myQueryList.isEmpty()) {
+		if (isNotEmpty(myQueryList)) {
 			myResultsIterator = myQueryList.remove(0);
 			myHasNextIteratorQuery = true;
 		} else {
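The chunking change above stops materializing the whole fulltext result stream into a List before chunking; the iterator is handed to QueryChunker and drained in pages of getMaximumPageSize(). A stand-alone sketch of that consumption pattern (modeled on the call above; this is not the real QueryChunker class):

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.function.Consumer;

public class ChunkDemo {
	// Drains the source iterator chunkSize elements at a time, so at most one
	// chunk is ever held in memory.
	static <T> void chunk(Iterator<T> source, int chunkSize, Consumer<List<T>> consumer) {
		List<T> buffer = new ArrayList<>(chunkSize);
		while (source.hasNext()) {
			buffer.add(source.next());
			if (buffer.size() == chunkSize) {
				consumer.accept(new ArrayList<>(buffer));
				buffer.clear();
			}
		}
		if (!buffer.isEmpty()) {
			consumer.accept(buffer);
		}
	}

	public static void main(String[] args) {
		Iterator<Long> pids = List.of(1L, 2L, 3L, 4L, 5L).iterator();
		chunk(pids, 2, System.out::println); // [1, 2] then [3, 4] then [5]
	}
}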
@@ -43,9 +43,10 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.Pageable;

 import java.util.Date;
-import java.util.List;
 import java.util.Optional;

 import static ca.uhn.fhir.jpa.model.entity.PersistedResourceModifiedMessageEntityPK.with;
@@ -82,8 +83,10 @@ public class ResourceModifiedMessagePersistenceSvcImpl implements IResourceModifiedMessagePersistenceSvc {
 	}

 	@Override
-	public List<IPersistedResourceModifiedMessage> findAllOrderedByCreatedTime() {
-		return myHapiTransactionService.withSystemRequest().execute(myResourceModifiedDao::findAllOrderedByCreatedTime);
+	public Page<IPersistedResourceModifiedMessage> findAllOrderedByCreatedTime(Pageable thePageable) {
+		return myHapiTransactionService.withSystemRequest().execute(() -> {
+			return myResourceModifiedDao.findAllOrderedByCreatedTime(thePageable);
+		});
 	}

 	@Override
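Callers of this persistence service now page through persisted messages instead of loading them all at once. A minimal sketch of constructing the new Pageable argument (standard Spring Data API; the page size of 100 is an arbitrary example):

import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;

public class PagingDemo {
	public static void main(String[] args) {
		// First page, 100 messages per page; pass this to findAllOrderedByCreatedTime(..).
		Pageable firstPage = PageRequest.of(0, 100);
		System.out.println(firstPage.getPageNumber() + "/" + firstPage.getPageSize()); // 0/100
	}
}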
@@ -0,0 +1,65 @@
+package ca.uhn.fhir.jpa.util;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/*
+ This class encapsulate the implementation providing a workaround to a known issue involving Hibernate. If queries are used with "in" clauses with large and varying
+ numbers of parameters, this can overwhelm Hibernate's QueryPlanCache and deplete heap space. See the following link for more info:
+ https://stackoverflow.com/questions/31557076/spring-hibernate-query-plan-cache-memory-usage.
+
+ Normalizing the number of parameters in the "in" clause stabilizes the size of the QueryPlanCache, so long as the number of
+ arguments never exceeds the maximum specified below.
+*/
+public class InClauseNormalizer {
+
+	public static List<Long> normalizeIdListForInClause(List<Long> theResourceIds) {
+
+		List<Long> retVal = theResourceIds;
+
+		int listSize = theResourceIds.size();
+
+		if (listSize > 1 && listSize < 10) {
+			retVal = padIdListWithPlaceholders(theResourceIds, 10);
+		} else if (listSize > 10 && listSize < 50) {
+			retVal = padIdListWithPlaceholders(theResourceIds, 50);
+		} else if (listSize > 50 && listSize < 100) {
+			retVal = padIdListWithPlaceholders(theResourceIds, 100);
+		} else if (listSize > 100 && listSize < 200) {
+			retVal = padIdListWithPlaceholders(theResourceIds, 200);
+		} else if (listSize > 200 && listSize < 500) {
+			retVal = padIdListWithPlaceholders(theResourceIds, 500);
+		} else if (listSize > 500 && listSize < 800) {
+			retVal = padIdListWithPlaceholders(theResourceIds, 800);
+		}
+
+		return retVal;
+	}
+
+	private static List<Long> padIdListWithPlaceholders(List<Long> theIdList, int preferredListSize) {
+		List<Long> retVal = theIdList;
+
+		if (isUnmodifiableList(theIdList)) {
+			retVal = new ArrayList<>(preferredListSize);
+			retVal.addAll(theIdList);
+		}
+
+		while (retVal.size() < preferredListSize) {
+			retVal.add(-1L);
+		}
+
+		return retVal;
+	}
+
+	private static boolean isUnmodifiableList(List<Long> theList) {
+		try {
+			theList.addAll(Collections.emptyList());
+		} catch (Exception e) {
+			return true;
+		}
+		return false;
+	}
+
+	private InClauseNormalizer() {}
+}
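A usage sketch of the new helper (the demo class is mine; the padding buckets come from the code above): three ids fall in the (1, 10) bucket, so the list is padded to 10 entries with the -1L placeholder, which never matches a real resource pid.

import java.util.List;

import ca.uhn.fhir.jpa.util.InClauseNormalizer;

public class InClauseNormalizerDemo {
	public static void main(String[] args) {
		// List.of(..) is immutable, so the normalizer copies it before padding.
		List<Long> normalized = InClauseNormalizer.normalizeIdListForInClause(List.of(101L, 102L, 103L));
		System.out.println(normalized.size()); // 10
		System.out.println(normalized);        // [101, 102, 103, -1, -1, -1, -1, -1, -1, -1]
	}
}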
@@ -1,76 +0,0 @@
-package ca.uhn.fhir.jpa.batch2;
-
-import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.entity.PartitionEntity;
-import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
-import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
-import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
-import ca.uhn.fhir.rest.server.provider.ProviderConstants;
-import org.assertj.core.api.Assertions;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.mockito.ArgumentMatchers;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.junit.jupiter.MockitoExtension;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-@ExtendWith(MockitoExtension.class)
-public class JpaJobPartitionProviderTest {
-	@Mock
-	private IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
-	@Mock
-	private IPartitionLookupSvc myPartitionLookupSvc;
-	@InjectMocks
-	private JpaJobPartitionProvider myJobPartitionProvider;
-
-	@Test
-	public void getPartitions_requestSpecificPartition_returnsPartition() {
-		// setup
-		SystemRequestDetails requestDetails = new SystemRequestDetails();
-		String operation = ProviderConstants.OPERATION_EXPORT;
-
-		RequestPartitionId partitionId = RequestPartitionId.fromPartitionId(1);
-		when(myRequestPartitionHelperSvc.determineReadPartitionForRequestForServerOperation(ArgumentMatchers.eq(requestDetails), ArgumentMatchers.eq(operation))).thenReturn(partitionId);
-
-		// test
-		List<RequestPartitionId> partitionIds = myJobPartitionProvider.getPartitions(requestDetails, operation);
-
-		// verify
-		Assertions.assertThat(partitionIds).hasSize(1);
-		Assertions.assertThat(partitionIds).containsExactlyInAnyOrder(partitionId);
-	}
-
-	@Test
-	public void getPartitions_requestAllPartitions_returnsListOfAllSpecificPartitions() {
-		// setup
-		SystemRequestDetails requestDetails = new SystemRequestDetails();
-		String operation = ProviderConstants.OPERATION_EXPORT;
-
-		when(myRequestPartitionHelperSvc.determineReadPartitionForRequestForServerOperation(ArgumentMatchers.eq(requestDetails), ArgumentMatchers.eq(operation)))
-				.thenReturn(RequestPartitionId.allPartitions());
-		List<RequestPartitionId> partitionIds = List.of(RequestPartitionId.fromPartitionIds(1), RequestPartitionId.fromPartitionIds(2));
-
-		List<PartitionEntity> partitionEntities = new ArrayList<>();
-		partitionIds.forEach(partitionId -> {
-			PartitionEntity entity = mock(PartitionEntity.class);
-			when(entity.toRequestPartitionId()).thenReturn(partitionId);
-			partitionEntities.add(entity);
-		});
-		when(myPartitionLookupSvc.listPartitions()).thenReturn(partitionEntities);
-		List<RequestPartitionId> expectedPartitionIds = new ArrayList<>(partitionIds);
-		expectedPartitionIds.add(RequestPartitionId.defaultPartition());
-
-		// test
-		List<RequestPartitionId> actualPartitionIds = myJobPartitionProvider.getPartitions(requestDetails, operation);
-
-		// verify
-		Assertions.assertThat(actualPartitionIds).hasSize(expectedPartitionIds.size());
-		Assertions.assertThat(actualPartitionIds).containsExactlyInAnyOrder(expectedPartitionIds.toArray(new RequestPartitionId[0]));
-	}
-}
@@ -9,13 +9,20 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.dao.data.INpmPackageVersionDao;
 import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
 import ca.uhn.fhir.jpa.dao.tx.NonTransactionalHapiTransactionService;
+import ca.uhn.fhir.jpa.dao.validation.SearchParameterDaoValidator;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.jpa.packages.loader.PackageResourceParsingSvc;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistryController;
 import ca.uhn.fhir.jpa.searchparam.util.SearchParameterHelper;
+import ca.uhn.fhir.mdm.log.Logs;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.server.SimpleBundleProvider;
+import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
+import ca.uhn.hapi.converters.canonical.VersionCanonicalizer;
+import ca.uhn.test.util.LogbackTestExtension;
+import ca.uhn.test.util.LogbackTestExtensionAssert;
+import ch.qos.logback.classic.Logger;
 import jakarta.annotation.Nonnull;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.r4.model.CodeSystem;
@@ -24,6 +31,7 @@ import org.hl7.fhir.r4.model.Communication;
 import org.hl7.fhir.r4.model.DocumentReference;
 import org.hl7.fhir.r4.model.Enumerations;
 import org.hl7.fhir.r4.model.IdType;
+import org.hl7.fhir.r4.model.Patient;
 import org.hl7.fhir.r4.model.SearchParameter;
 import org.hl7.fhir.r4.model.Subscription;
 import org.hl7.fhir.utilities.npm.NpmPackage;
@@ -31,6 +39,7 @@ import org.hl7.fhir.utilities.npm.PackageGenerator;
 import org.junit.jupiter.api.Nested;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.api.extension.RegisterExtension;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
@@ -40,6 +49,7 @@ import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.Spy;
 import org.mockito.junit.jupiter.MockitoExtension;
+import org.slf4j.LoggerFactory;

 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -52,8 +62,14 @@ import java.util.Optional;
 import java.util.stream.Stream;

 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.params.provider.Arguments.arguments;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
@@ -63,6 +79,10 @@ public class PackageInstallerSvcImplTest {
 	public static final String PACKAGE_VERSION = "1.0";
 	public static final String PACKAGE_ID_1 = "package1";

+
+	@RegisterExtension
+	LogbackTestExtension myLogCapture = new LogbackTestExtension(LoggerFactory.getLogger(PackageInstallerSvcImpl.class));
+
 	@Mock
 	private INpmPackageVersionDao myPackageVersionDao;
 	@Mock
@@ -83,6 +103,13 @@ public class PackageInstallerSvcImplTest {
 	private SearchParameterMap mySearchParameterMap;
 	@Mock
 	private JpaStorageSettings myStorageSettings;
+
+	@Mock
+	private VersionCanonicalizer myVersionCanonicalizerMock;
+
+	@Mock
+	private SearchParameterDaoValidator mySearchParameterDaoValidatorMock;
+
 	@Spy
 	private FhirContext myCtx = FhirContext.forR4Cached();
 	@Spy
@@ -91,6 +118,8 @@ public class PackageInstallerSvcImplTest {
 	private PackageResourceParsingSvc myPackageResourceParsingSvc = new PackageResourceParsingSvc(myCtx);
 	@Spy
 	private PartitionSettings myPartitionSettings = new PartitionSettings();

+
+
 	@InjectMocks
 	private PackageInstallerSvcImpl mySvc;

@@ -110,66 +139,97 @@ public class PackageInstallerSvcImplTest {

 	@Nested
 	class ValidForUploadTest {

 		public static Stream<Arguments> parametersIsValidForUpload() {
-			SearchParameter sp1 = new SearchParameter();
-			sp1.setCode("_id");
+			// Patient resource doesn't have a status element in FHIR spec
+			Patient resourceWithNoStatusElementInSpec = new Patient();

-			SearchParameter sp2 = new SearchParameter();
-			sp2.setCode("name");
-			sp2.setExpression("Patient.name");
-			sp2.setStatus(Enumerations.PublicationStatus.ACTIVE);
+			SearchParameter spWithActiveStatus = new SearchParameter();
+			spWithActiveStatus.setStatus(Enumerations.PublicationStatus.ACTIVE);

-			SearchParameter sp3 = new SearchParameter();
-			sp3.setCode("name");
-			sp3.addBase("Patient");
-			sp3.setStatus(Enumerations.PublicationStatus.ACTIVE);
+			SearchParameter spWithDraftStatus = new SearchParameter();
+			spWithDraftStatus.setStatus(Enumerations.PublicationStatus.DRAFT);

-			SearchParameter sp4 = new SearchParameter();
-			sp4.setCode("name");
-			sp4.addBase("Patient");
-			sp4.setExpression("Patient.name");
-			sp4.setStatus(Enumerations.PublicationStatus.ACTIVE);
-
-			SearchParameter sp5 = new SearchParameter();
-			sp5.setCode("name");
-			sp5.addBase("Patient");
-			sp5.setExpression("Patient.name");
-			sp5.setStatus(Enumerations.PublicationStatus.DRAFT);
+			SearchParameter spWithNullStatus = new SearchParameter();
+			spWithNullStatus.setStatus(null);

 			return Stream.of(
-					arguments(sp1, false, false),
-					arguments(sp2, false, true),
-					arguments(sp3, false, true),
-					arguments(sp4, true, true),
-					arguments(sp5, true, false),
-					arguments(createSubscription(Subscription.SubscriptionStatus.REQUESTED), true, true),
-					arguments(createSubscription(Subscription.SubscriptionStatus.ERROR), true, false),
-					arguments(createSubscription(Subscription.SubscriptionStatus.ACTIVE), true, false),
-					arguments(createDocumentReference(Enumerations.DocumentReferenceStatus.ENTEREDINERROR), true, true),
-					arguments(createDocumentReference(Enumerations.DocumentReferenceStatus.NULL), true, false),
-					arguments(createDocumentReference(null), true, false),
-					arguments(createCommunication(Communication.CommunicationStatus.NOTDONE), true, true),
-					arguments(createCommunication(Communication.CommunicationStatus.NULL), true, false),
-					arguments(createCommunication(null), true, false));
+					arguments(resourceWithNoStatusElementInSpec, true),
+					arguments(spWithActiveStatus, true),
+					arguments(spWithNullStatus, false),
+					arguments(spWithDraftStatus, false),
+					arguments(createSubscription(Subscription.SubscriptionStatus.REQUESTED), true),
+					arguments(createSubscription(Subscription.SubscriptionStatus.ERROR), false),
+					arguments(createSubscription(Subscription.SubscriptionStatus.ACTIVE), false),
+					arguments(createDocumentReference(Enumerations.DocumentReferenceStatus.ENTEREDINERROR), true),
+					arguments(createDocumentReference(Enumerations.DocumentReferenceStatus.NULL), false),
+					arguments(createDocumentReference(null), false),
+					arguments(createCommunication(Communication.CommunicationStatus.NOTDONE), true),
+					arguments(createCommunication(Communication.CommunicationStatus.NULL), false),
+					arguments(createCommunication(null), false));
 		}

 		@ParameterizedTest
 		@MethodSource(value = "parametersIsValidForUpload")
-		public void testValidForUpload_withResource(IBaseResource theResource,
-													boolean theTheMeetsOtherFilterCriteria,
-													boolean theMeetsStatusFilterCriteria) {
-			if (theTheMeetsOtherFilterCriteria) {
-				when(myStorageSettings.isValidateResourceStatusForPackageUpload()).thenReturn(true);
+		public void testValidForUpload_WhenStatusValidationSettingIsEnabled_ValidatesResourceStatus(IBaseResource theResource,
+																									boolean theExpectedResultForStatusValidation) {
+			if (theResource.fhirType().equals("SearchParameter")) {
+				setupSearchParameterValidationMocksForSuccess();
 			}
-			assertEquals(theTheMeetsOtherFilterCriteria && theMeetsStatusFilterCriteria, mySvc.validForUpload(theResource));
+			when(myStorageSettings.isValidateResourceStatusForPackageUpload()).thenReturn(true);
+			assertEquals(theExpectedResultForStatusValidation, mySvc.validForUpload(theResource));
 		}

-			if (theTheMeetsOtherFilterCriteria) {
-				when(myStorageSettings.isValidateResourceStatusForPackageUpload()).thenReturn(false);
+		@ParameterizedTest
+		@MethodSource(value = "parametersIsValidForUpload")
+		public void testValidForUpload_WhenStatusValidationSettingIsDisabled_DoesNotValidateResourceStatus(IBaseResource theResource) {
+			if (theResource.fhirType().equals("SearchParameter")) {
+				setupSearchParameterValidationMocksForSuccess();
 			}
-			assertEquals(theTheMeetsOtherFilterCriteria, mySvc.validForUpload(theResource));
+			when(myStorageSettings.isValidateResourceStatusForPackageUpload()).thenReturn(false);
+			//all resources should pass status validation in this case, so expect true always
+			assertTrue(mySvc.validForUpload(theResource));
 		}

+		@Test
+		public void testValidForUpload_WhenSearchParameterIsInvalid_ReturnsFalse() {
+
+			final String validationExceptionMessage = "This SP is invalid!!";
+			final String spURL = "http://myspurl.example/invalidsp";
+			SearchParameter spR4 = new SearchParameter();
+			spR4.setUrl(spURL);
+			org.hl7.fhir.r5.model.SearchParameter spR5 = new org.hl7.fhir.r5.model.SearchParameter();
+
+			when(myVersionCanonicalizerMock.searchParameterToCanonical(spR4)).thenReturn(spR5);
+			doThrow(new UnprocessableEntityException(validationExceptionMessage)).
+				when(mySearchParameterDaoValidatorMock).validate(spR5);
+
+			assertFalse(mySvc.validForUpload(spR4));
+
+			final String expectedLogMessage = String.format(
+					"The SearchParameter with URL %s is invalid. Validation Error: %s", spURL, validationExceptionMessage);
+			LogbackTestExtensionAssert.assertThat(myLogCapture).hasErrorMessage(expectedLogMessage);
+		}
+
+		@Test
+		public void testValidForUpload_WhenSearchParameterValidatorThrowsAnExceptionOtherThanUnprocessableEntityException_ThenThrows() {
+
+			SearchParameter spR4 = new SearchParameter();
+			org.hl7.fhir.r5.model.SearchParameter spR5 = new org.hl7.fhir.r5.model.SearchParameter();
+
+			RuntimeException notAnUnprocessableEntityException = new RuntimeException("should not be caught");
+			when(myVersionCanonicalizerMock.searchParameterToCanonical(spR4)).thenReturn(spR5);
+			doThrow(notAnUnprocessableEntityException).
+				when(mySearchParameterDaoValidatorMock).validate(spR5);
+
+			Exception actualExceptionThrown = assertThrows(Exception.class, () -> mySvc.validForUpload(spR4));
+			assertEquals(notAnUnprocessableEntityException, actualExceptionThrown);
+		}
 	}

 	@Test
 	public void testDontTryToInstallDuplicateCodeSystem_CodeSystemAlreadyExistsWithDifferentId() throws IOException {
 		// Setup
@@ -296,6 +356,11 @@ public class PackageInstallerSvcImplTest {
 		return pkg;
 	}

+	private void setupSearchParameterValidationMocksForSuccess() {
+		when(myVersionCanonicalizerMock.searchParameterToCanonical(any())).thenReturn(new org.hl7.fhir.r5.model.SearchParameter());
+		doNothing().when(mySearchParameterDaoValidatorMock).validate(any());
+	}
+
 	private static SearchParameter createSearchParameter(String theId, Collection<String> theBase) {
 		SearchParameter searchParameter = new SearchParameter();
 		if (theId != null) {
@@ -330,4 +395,5 @@ public class PackageInstallerSvcImplTest {
 		communication.setStatus(theCommunicationStatus);
 		return communication;
 	}
+
 }