mergeback rel_7_0 to master
commit b395039e18
@@ -29,4 +29,6 @@ import com.fasterxml.jackson.annotation.JsonInclude;
         getterVisibility = JsonAutoDetect.Visibility.NONE,
         isGetterVisibility = JsonAutoDetect.Visibility.NONE,
         setterVisibility = JsonAutoDetect.Visibility.NONE)
-public interface IModelJson {}
+public interface IModelJson {
+    String SENSITIVE_DATA_FILTER_NAME = "sensitiveDataFilter";
+}

@@ -0,0 +1,34 @@
+/*-
+ * #%L
+ * HAPI FHIR - Core Library
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.model.api.annotation;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Annotation to mark a field as sensitive, indicating that it should not
+ * be displayed or serialized by Jackson. The only way to serialize an object annotated with this annotation is to use
+ * {@link ca.uhn.fhir.util.JsonUtil}, as it has a registered filter against this annotation.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.FIELD)
+public @interface SensitiveNoDisplay {}

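Note on the wiring this annotation relies on: it only takes effect when the owning class also opts in to the named Jackson filter via @JsonFilter and serialization goes through ca.uhn.fhir.util.JsonUtil, whose mappers register the filter (see the JsonUtil changes below). A minimal sketch, using a hypothetical DTO invented for illustration:

    @JsonFilter(IModelJson.SENSITIVE_DATA_FILTER_NAME)
    class ConnectionParameters implements IModelJson { // hypothetical DTO, not part of this commit
        @JsonProperty("password")
        @SensitiveNoDisplay // stripped by the default JsonUtil mappers
        private String myPassword;

        @JsonProperty("username")
        private String myUsername; // serialized normally
    }

    // JsonUtil.serialize(params) omits "password";
    // JsonUtil.serializeWithSensitiveData(params) includes it.
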
@@ -91,7 +91,7 @@ public enum BundleTypeEnum {
     /**
      * Returns the enumerated value associated with this code
      */
-    public BundleTypeEnum forCode(String theCode) {
+    public static BundleTypeEnum forCode(String theCode) {
         BundleTypeEnum retVal = CODE_TO_ENUM.get(theCode);
         return retVal;
     }

@@ -24,6 +24,7 @@ import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
 import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.FhirVersionEnum;
+import ca.uhn.fhir.model.primitive.CodeDt;
 import org.apache.commons.lang3.Validate;
 import org.hl7.fhir.instance.model.api.IBase;
 import org.hl7.fhir.instance.model.api.ICompositeType;

@@ -41,8 +42,8 @@ public class AttachmentUtil {
         return getOrCreateChild(theContext, theAttachment, "data", "base64Binary");
     }

-    public static IPrimitiveType<String> getOrCreateContentType(FhirContext theContext, ICompositeType theAttachment) {
-        return getOrCreateChild(theContext, theAttachment, "contentType", "string");
+    public static IPrimitiveType<CodeDt> getOrCreateContentType(FhirContext theContext, ICompositeType theAttachment) {
+        return getOrCreateChild(theContext, theAttachment, "contentType", "code");
     }

     public static IPrimitiveType<String> getOrCreateUrl(FhirContext theContext, ICompositeType theAttachment) {

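For context, this is the fix described by changelog entry 5537 below: FHIR models Attachment.contentType as a code datatype, so asking the context for a "string" child threw when an empty child had to be created. A minimal sketch of the corrected behaviour, assuming an R4 attachment:

    FhirContext ctx = FhirContext.forR4Cached();
    ICompositeType attachment = new org.hl7.fhir.r4.model.Attachment();

    // Before this change the call below could throw on an attachment with no content
    // type, because a "string" child was requested where the model defines a "code";
    // it now creates an empty code as expected.
    IPrimitiveType<CodeDt> contentType = AttachmentUtil.getOrCreateContentType(ctx, attachment);
    contentType.setValueAsString("application/fhir+json");
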
@@ -251,10 +251,22 @@ public class BundleBuilder {
      * @param theResource The resource to create
      */
     public CreateBuilder addTransactionCreateEntry(IBaseResource theResource) {
+        return addTransactionCreateEntry(theResource, null);
+    }
+
+    /**
+     * Adds an entry containing a create (POST) request.
+     * Also sets the Bundle.type value to "transaction" if it is not already set.
+     *
+     * @param theResource The resource to create
+     * @param theFullUrl The fullUrl to attach to the entry. If null, will default to the resource ID.
+     */
+    public CreateBuilder addTransactionCreateEntry(IBaseResource theResource, @Nullable String theFullUrl) {
         setBundleField("type", "transaction");

-        IBase request =
-                addEntryAndReturnRequest(theResource, theResource.getIdElement().getValue());
+        IBase request = addEntryAndReturnRequest(
+                theResource,
+                theFullUrl != null ? theFullUrl : theResource.getIdElement().getValue());

         String resourceType = myContext.getResourceType(theResource);

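A short usage sketch of the new overload (the resource ID and URL values are illustrative):

    BundleBuilder builder = new BundleBuilder(FhirContext.forR4Cached());

    Patient patient = new Patient();
    patient.setId("urn:uuid:3bc44de3-069d-442d-829b-f3ef68cae371");

    // The entry fullUrl defaults to the resource ID...
    builder.addTransactionCreateEntry(patient);

    // ...or can now be pinned explicitly:
    builder.addTransactionCreateEntry(patient, "urn:uuid:3bc44de3-069d-442d-829b-f3ef68cae371");

    IBaseBundle bundle = builder.getBundle();
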
@@ -26,6 +26,7 @@ import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeResourceDefinition;
 import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.model.primitive.IdDt;
+import ca.uhn.fhir.model.valueset.BundleTypeEnum;
 import ca.uhn.fhir.rest.api.PatchTypeEnum;
 import ca.uhn.fhir.rest.api.RequestTypeEnum;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;

@@ -235,6 +236,14 @@ public class BundleUtil {
         return null;
     }

+    public static BundleTypeEnum getBundleTypeEnum(FhirContext theContext, IBaseBundle theBundle) {
+        String bundleTypeCode = BundleUtil.getBundleType(theContext, theBundle);
+        if (isBlank(bundleTypeCode)) {
+            return null;
+        }
+        return BundleTypeEnum.forCode(bundleTypeCode);
+    }
+
     public static void setBundleType(FhirContext theContext, IBaseBundle theBundle, String theType) {
         RuntimeResourceDefinition def = theContext.getResourceDefinition(theBundle);
         BaseRuntimeChildDefinition entryChild = def.getChildByName("type");

@@ -21,15 +21,23 @@ package ca.uhn.fhir.util;

 import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.model.api.IModelJson;
+import ca.uhn.fhir.model.api.annotation.SensitiveNoDisplay;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.SerializationFeature;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.ser.FilterProvider;
+import com.fasterxml.jackson.databind.ser.PropertyWriter;
+import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
+import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
 import jakarta.annotation.Nonnull;

 import java.io.IOException;
 import java.io.InputStream;
 import java.io.StringWriter;
 import java.io.Writer;
 import java.util.List;

@@ -38,15 +46,30 @@ public class JsonUtil {

     private static final ObjectMapper ourMapperPrettyPrint;
     private static final ObjectMapper ourMapperNonPrettyPrint;
+    private static final ObjectMapper ourMapperIncludeSensitive;
+
+    public static final SimpleBeanPropertyFilter SIMPLE_BEAN_PROPERTY_FILTER = new SensitiveDataFilter();
+
+    public static final SimpleFilterProvider SENSITIVE_DATA_FILTER_PROVIDER =
+            new SimpleFilterProvider().addFilter(IModelJson.SENSITIVE_DATA_FILTER_NAME, SIMPLE_BEAN_PROPERTY_FILTER);
+    public static final SimpleFilterProvider SHOW_ALL_DATA_FILTER_PROVIDER = new SimpleFilterProvider()
+            .addFilter(IModelJson.SENSITIVE_DATA_FILTER_NAME, SimpleBeanPropertyFilter.serializeAll());

     static {
         ourMapperPrettyPrint = new ObjectMapper();
         ourMapperPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL);
+        ourMapperPrettyPrint.setFilterProvider(SENSITIVE_DATA_FILTER_PROVIDER);
         ourMapperPrettyPrint.enable(SerializationFeature.INDENT_OUTPUT);

         ourMapperNonPrettyPrint = new ObjectMapper();
         ourMapperNonPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL);
+        ourMapperNonPrettyPrint.setFilterProvider(SENSITIVE_DATA_FILTER_PROVIDER);
         ourMapperNonPrettyPrint.disable(SerializationFeature.INDENT_OUTPUT);
+
+        ourMapperIncludeSensitive = new ObjectMapper();
+        ourMapperIncludeSensitive.setFilterProvider(SHOW_ALL_DATA_FILTER_PROVIDER);
+        ourMapperIncludeSensitive.setSerializationInclusion(JsonInclude.Include.NON_NULL);
+        ourMapperIncludeSensitive.disable(SerializationFeature.INDENT_OUTPUT);
     }

     /**

@@ -67,6 +90,24 @@ public class JsonUtil {
     public static <T> List<T> deserializeList(@Nonnull String theInput, @Nonnull Class<T> theType) throws IOException {
         return ourMapperPrettyPrint.readerForListOf(theType).readValue(theInput);
     }
+    /**
+     * Parse JSON
+     */
+    public static <T> T deserialize(@Nonnull InputStream theInput, @Nonnull Class<T> theType) throws IOException {
+        return ourMapperPrettyPrint.readerFor(theType).readValue(theInput);
+    }
+
+    /**
+     * Includes fields which are annotated with {@link SensitiveNoDisplay}. Currently only meant to be used for serialization
+     * for batch job parameters.
+     */
+    public static String serializeWithSensitiveData(@Nonnull IModelJson theInput) {
+        try {
+            return ourMapperIncludeSensitive.writeValueAsString(theInput);
+        } catch (JsonProcessingException e) {
+            throw new InvalidRequestException(Msg.code(2487) + "Failed to encode " + theInput.getClass(), e);
+        }
+    }

     /**
      * Encode JSON

@@ -93,6 +134,10 @@ public class JsonUtil {
         }
     }

+    public FilterProvider getSensitiveDataFilterProvider() {
+        return SENSITIVE_DATA_FILTER_PROVIDER;
+    }
+
     /**
      * Encode JSON
      */

@@ -111,4 +156,26 @@ public class JsonUtil {
             throw new InvalidRequestException(Msg.code(1741) + "Failed to encode " + theJson.getClass(), e);
         }
     }
+
+    private static class SensitiveDataFilter extends SimpleBeanPropertyFilter {
+
+        @Override
+        protected boolean include(PropertyWriter writer) {
+            return true; // Default include all except explicitly checked and excluded
+        }
+
+        @Override
+        public void serializeAsField(Object pojo, JsonGenerator gen, SerializerProvider provider, PropertyWriter writer)
+                throws Exception {
+            if (include(writer)) {
+                if (!isFieldSensitive(writer)) {
+                    super.serializeAsField(pojo, gen, provider, writer);
+                }
+            }
+        }
+
+        private boolean isFieldSensitive(PropertyWriter writer) {
+            return writer.getAnnotation(SensitiveNoDisplay.class) != null;
+        }
+    }
 }

@@ -135,6 +135,8 @@ public enum VersionEnum {
     V6_11_0,

     V7_0_0,
+    V7_0_1,
+
     V7_1_0,
     V7_2_0;

@@ -0,0 +1,54 @@
+package ca.uhn.fhir.util;
+
+import ca.uhn.fhir.model.api.IModelJson;
+import ca.uhn.fhir.model.api.annotation.SensitiveNoDisplay;
+import com.fasterxml.jackson.annotation.JsonFilter;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import org.junit.jupiter.api.Test;
+
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.not;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+class JsonUtilTest {
+
+    @JsonFilter(IModelJson.SENSITIVE_DATA_FILTER_NAME)
+    class TestObject implements IModelJson {
+        @JsonProperty("sensitiveField")
+        @SensitiveNoDisplay
+        private String mySensitiveField;
+
+        @JsonProperty(value = "publicField")
+        private String myPublicField;
+
+        public String getPrivateField() {
+            return mySensitiveField;
+        }
+
+        public void setSensitiveField(String thePrivateField) {
+            this.mySensitiveField = thePrivateField;
+        }
+
+        public String getPublicField() {
+            return myPublicField;
+        }
+
+        public void setPublicField(String thePublicField) {
+            this.myPublicField = thePublicField;
+        }
+    }
+
+    @Test
+    public void testSensitiveNoDisplayAnnotationIsHiddenFromBasicSerialization() {
+        TestObject object = new TestObject();
+        object.setPublicField("Public Value!");
+        object.setSensitiveField("Sensitive Value!");
+
+        String sensitiveExcluded = JsonUtil.serializeOrInvalidRequest(object);
+        assertThat(sensitiveExcluded, is(not(containsString("Sensitive Value!"))));
+
+        String sensitiveIncluded = JsonUtil.serializeWithSensitiveData(object);
+        assertThat(sensitiveIncluded, is(containsString("Sensitive Value!")));
+    }
+}

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5537
+title: "Calling the method getOrCreateContentType in AttachmentUtil on an attachment with no content type would throw an exception, because contentType is a code, not a string.
+  This fixes the function to create an empty code as expected."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5547
+title: "The addition of the indexes `idx_sp_uri_hash_identity_pattern_ops` and `idx_sp_string_hash_nrm_pattern_ops` could occasionally time out during migration on large PostgreSQL databases, leaving the migration table in a failed state and Smile CDR unable to boot.
+  The existence of each index is now checked before attempting to add it again."

@@ -0,0 +1,6 @@
+---
+type: fix
+issue: 5603
+jira: SMILE-8000
+title: "Previously, the semantics of `is-a` were incorrect in ValueSet expansion. The implementation previously used the behaviour of `descendent-of`, which means that `A is-a A` was not being considered true. This has been corrected. In addition,
+  `descendent-of` is now supported, which compares for strict descendancy and does not include the concept itself. Thanks to Ole Hedegaard (@ohetrifork) for the fix."

@@ -0,0 +1,7 @@
+---
+type: fix
+issue: 5619
+jira: SMILE-7909
+title: "Previously, when a transaction was posted with a resource that had placeholder references and auto-versioning
+  references enabled for that path, if the target resource was included in the Bundle but not modified, the reference was
+  saved with a version number that didn't exist. This has been fixed."

@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 5621
+title: "Fixed a deadlock in resource conditional create."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5623
+title: "Previously, searches that used more than one chained `Bundle` `SearchParameter` (i.e. `Composition`) were only
+  adding one condition to the underlying SQL query, which resulted in incorrect search results. This has been fixed."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5626
+title: "Previously, an exception could be thrown by the container when executing a contextClosedEvent on the
+  Scheduler Service. This issue has been fixed."

@@ -0,0 +1,6 @@
+---
+type: fix
+issue: 5632
+title: "Previously, the bulk export operation returned an empty response when no resources matched the request, which
+  didn't comply with the [HL7 bulk data IG](https://hl7.org/fhir/uv/bulkdata/export/index.html#response---complete-status).
+  This has been corrected."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5633
+title: "Smile failed to save resources on Oracle when installed from 2023-02 or earlier.
+  This has been fixed."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5634
+title: "Previously, expanding a 'ValueSet' with no concepts based on system `urn:ietf:bcp:13` would fail with
+  `ExpansionCouldNotBeCompletedInternallyException`. This has been fixed."

@@ -0,0 +1,7 @@
+---
+type: fix
+issue: 5636
+jira: SMILE-7648
+title: "Previously, the number of threads allocated to the $expunge operation could in certain cases be more
+  than configured; this would cause hundreds of threads to be created and all available database connections
+  to be consumed. This has been fixed."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5640
+jira: SMILE-7977
+title: "Clinical Reasoning version bump to address a reported null-pointer error encountered when running $evaluate-measure against a measure with an omitted measure.group.population.id."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5642
+title: "A non-superuser with correct permissions encountered HAPI-0339 when POSTing a transaction Bundle with a PATCH.
+  This has been fixed."

@@ -0,0 +1,8 @@
+---
+type: fix
+issue: 5644
+title: "Previously, searching for `Bundle` resources with permission to read all `Bundle` resources returned an
+  HTTP 403 Forbidden error. This was because the `AuthorizationInterceptor` applied permissions to the resources inside
+  the `Bundle`, instead of the `Bundle` itself. This has been fixed, and permissions are no longer applied to the resources
+  inside a `Bundle` of type `document`, `message`, or `collection` for `Bundle` requests."

@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 5649
+title: "Change database upgrade script to avoid holding locks while adding indices."

@@ -0,0 +1,6 @@
+---
+type: fix
+issue: 5651
+jira: SMILE-7855
+title: "Previously, conditional creates would fail with HAPI-0929 errors if there was no preceding '?'.
+  This has been fixed."

@@ -0,0 +1,5 @@
+---
+type: fix
+jira: SMILE-7216
+title: "Previously, the Bulk Import (`$import`) job was ignoring the `httpBasicCredentials` section of the incoming parameters
+  object, causing the job to fail with a 403 error. This has been corrected."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5659
+title: "Previously, after registering the built-in interceptor `PatientIdPartitionInterceptor`, the system bulk export
+  (with no filters) operation would fail with a NullPointerException. This has been fixed."

@@ -28,13 +28,13 @@ import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
 import ca.uhn.fhir.util.StopWatch;
 import com.google.common.annotations.VisibleForTesting;
 import jakarta.annotation.PostConstruct;
+import jakarta.annotation.PreDestroy;
 import org.quartz.JobKey;
 import org.quartz.SchedulerException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.ApplicationContext;
-import org.springframework.context.event.ContextClosedEvent;
 import org.springframework.context.event.ContextRefreshedEvent;
 import org.springframework.context.event.EventListener;
 import org.springframework.core.env.Environment;

@@ -177,7 +177,7 @@ public abstract class BaseSchedulerServiceImpl implements ISchedulerService {
         values.forEach(t -> t.scheduleJobs(this));
     }

-    @EventListener(ContextClosedEvent.class)
+    @PreDestroy
     public void stop() {
         ourLog.info("Shutting down task scheduler...");

@@ -75,4 +75,8 @@ public class HibernatePropertiesProvider {
     public DataSource getDataSource() {
         return myEntityManagerFactory.getDataSource();
     }
+
+    public boolean isOracleDialect() {
+        return getDialect() instanceof org.hibernate.dialect.OracleDialect;
+    }
 }

@@ -51,6 +51,7 @@ import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
 import ca.uhn.fhir.jpa.dao.ISearchBuilder;
 import ca.uhn.fhir.jpa.dao.JpaStorageResourceParser;
 import ca.uhn.fhir.jpa.dao.MatchResourceUrlService;
+import ca.uhn.fhir.jpa.dao.ResourceHistoryCalculator;
 import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
 import ca.uhn.fhir.jpa.dao.TransactionProcessor;
 import ca.uhn.fhir.jpa.dao.data.IResourceModifiedDao;

@@ -869,4 +870,10 @@ public class JpaConfig {
     public IMetaTagSorter metaTagSorter() {
         return new MetaTagSorterAlphabetical();
     }
+
+    @Bean
+    public ResourceHistoryCalculator resourceHistoryCalculator(
+            FhirContext theFhirContext, HibernatePropertiesProvider theHibernatePropertiesProvider) {
+        return new ResourceHistoryCalculator(theFhirContext, theHibernatePropertiesProvider.isOracleDialect());
+    }
 }

@@ -85,7 +85,6 @@ import ca.uhn.fhir.model.api.TagList;
 import ca.uhn.fhir.model.base.composite.BaseCodingDt;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.parser.DataFormatException;
-import ca.uhn.fhir.parser.IParser;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
 import ca.uhn.fhir.rest.api.RestOperationTypeEnum;

@@ -105,8 +104,6 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Charsets;
 import com.google.common.collect.Sets;
 import com.google.common.hash.HashCode;
-import com.google.common.hash.HashFunction;
-import com.google.common.hash.Hashing;
 import jakarta.annotation.Nonnull;
 import jakarta.annotation.Nullable;
 import jakarta.annotation.PostConstruct;

@@ -264,6 +261,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
     @Autowired
     private PlatformTransactionManager myTransactionManager;

+    @Autowired
+    protected ResourceHistoryCalculator myResourceHistoryCalculator;
+
     protected final CodingSpy myCodingSpy = new CodingSpy();

     @VisibleForTesting

@@ -277,6 +277,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
         mySearchParamPresenceSvc = theSearchParamPresenceSvc;
     }

+    @VisibleForTesting
+    public void setResourceHistoryCalculator(ResourceHistoryCalculator theResourceHistoryCalculator) {
+        myResourceHistoryCalculator = theResourceHistoryCalculator;
+    }
+
     @Override
     protected IInterceptorBroadcaster getInterceptorBroadcaster() {
         return myInterceptorBroadcaster;

@@ -643,6 +648,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
             theEntity.setResourceType(toResourceName(theResource));
         }

+        byte[] resourceBinary;
         String resourceText;
         ResourceEncodingEnum encoding;
         boolean changed = false;

@@ -659,6 +665,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
             if (address != null) {

                 encoding = ResourceEncodingEnum.ESR;
+                resourceBinary = null;
                 resourceText = address.getProviderId() + ":" + address.getLocation();
                 changed = true;

@@ -675,10 +682,15 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora

             theEntity.setFhirVersion(myContext.getVersion().getVersion());

-            HashFunction sha256 = Hashing.sha256();
-            resourceText = encodeResource(theResource, encoding, excludeElements, myContext);
-            encoding = ResourceEncodingEnum.JSON;
-            HashCode hashCode = sha256.hashUnencodedChars(resourceText);
+            // TODO: LD: Once 2024-02 is out the door we should consider further refactoring here to move
+            // more of this logic within the calculator and eliminate more local variables
+            final ResourceHistoryState calculate = myResourceHistoryCalculator.calculateResourceHistoryState(
+                    theResource, encoding, excludeElements);
+
+            resourceText = calculate.getResourceText();
+            resourceBinary = calculate.getResourceBinary();
+            encoding = calculate.getEncoding(); // This may be a no-op
+            final HashCode hashCode = calculate.getHashCode();

             String hashSha256 = hashCode.toString();
             if (!hashSha256.equals(theEntity.getHashSha256())) {

@@ -696,6 +708,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
         } else {

             encoding = null;
+            resourceBinary = null;
             resourceText = null;
         }

@@ -713,6 +726,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
                 changed = true;
             }

+            resourceBinary = null;
             resourceText = null;
             encoding = ResourceEncodingEnum.DEL;
         }

@@ -737,13 +751,17 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
                 if (currentHistoryVersion == null || !currentHistoryVersion.hasResource()) {
                     changed = true;
                 } else {
-                    changed = !StringUtils.equals(currentHistoryVersion.getResourceTextVc(), resourceText);
+                    // TODO: LD: Once 2024-02 is out the door we should consider further refactoring here to move
+                    // more of this logic within the calculator and eliminate more local variables
+                    changed = myResourceHistoryCalculator.isResourceHistoryChanged(
+                            currentHistoryVersion, resourceBinary, resourceText);
                 }
             }
         }

         EncodedResource retVal = new EncodedResource();
         retVal.setEncoding(encoding);
+        retVal.setResourceBinary(resourceBinary);
         retVal.setResourceText(resourceText);
         retVal.setChanged(changed);

@@ -1393,8 +1411,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
         ResourceEncodingEnum encoding = myStorageSettings.getResourceEncoding();
         List<String> excludeElements = new ArrayList<>(8);
         getExcludedElements(historyEntity.getResourceType(), excludeElements, theResource.getMeta());
-        String encodedResourceString = encodeResource(theResource, encoding, excludeElements, myContext);
-        boolean changed = !StringUtils.equals(historyEntity.getResourceTextVc(), encodedResourceString);
+        String encodedResourceString =
+                myResourceHistoryCalculator.encodeResource(theResource, encoding, excludeElements);
+        byte[] resourceBinary = ResourceHistoryCalculator.getResourceBinary(encoding, encodedResourceString);
+        final boolean changed = myResourceHistoryCalculator.isResourceHistoryChanged(
+                historyEntity, resourceBinary, encodedResourceString);

         historyEntity.setUpdated(theTransactionDetails.getTransactionDate());

@@ -1406,14 +1427,15 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
             return historyEntity;
         }

-        populateEncodedResource(encodedResource, encodedResourceString, ResourceEncodingEnum.JSON);
+        myResourceHistoryCalculator.populateEncodedResource(
+                encodedResource, encodedResourceString, resourceBinary, encoding);
     }

     /*
      * Save the resource itself to the resourceHistoryTable
      */
     historyEntity = myEntityManager.merge(historyEntity);
     historyEntity.setEncoding(encodedResource.getEncoding());
     historyEntity.setResource(encodedResource.getResourceBinary());
     historyEntity.setResourceTextVc(encodedResource.getResourceText());
     myResourceHistoryTableDao.save(historyEntity);

@@ -1423,8 +1445,12 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
     }

     private void populateEncodedResource(
-            EncodedResource encodedResource, String encodedResourceString, ResourceEncodingEnum theEncoding) {
+            EncodedResource encodedResource,
+            String encodedResourceString,
+            byte[] theResourceBinary,
+            ResourceEncodingEnum theEncoding) {
         encodedResource.setResourceText(encodedResourceString);
+        encodedResource.setResourceBinary(theResourceBinary);
         encodedResource.setEncoding(theEncoding);
     }

@@ -1489,6 +1515,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
         }

         historyEntry.setEncoding(theChanged.getEncoding());
+        historyEntry.setResource(theChanged.getResourceBinary());
         historyEntry.setResourceTextVc(theChanged.getResourceText());

         ourLog.debug("Saving history entry ID[{}] for RES_ID[{}]", historyEntry.getId(), historyEntry.getResourceId());

@@ -1926,16 +1953,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
         return resourceText;
     }

-    public static String encodeResource(
-            IBaseResource theResource,
-            ResourceEncodingEnum theEncoding,
-            List<String> theExcludeElements,
-            FhirContext theContext) {
-        IParser parser = theEncoding.newParser(theContext);
-        parser.setDontEncodeElements(theExcludeElements);
-        return parser.encodeResourceToString(theResource);
-    }
-
     private static String parseNarrativeTextIntoWords(IBaseResource theResource) {

         StringBuilder b = new StringBuilder();

@@ -1710,17 +1710,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
         if (historyEntity.getEncoding() == ResourceEncodingEnum.JSONC
                 || historyEntity.getEncoding() == ResourceEncodingEnum.JSON) {
             byte[] resourceBytes = historyEntity.getResource();

-            // Always migrate data out of the bytes column
             if (resourceBytes != null) {
                 String resourceText = decodeResource(resourceBytes, historyEntity.getEncoding());
-                ourLog.debug(
-                        "Storing text of resource {} version {} as inline VARCHAR",
-                        entity.getResourceId(),
-                        historyEntity.getVersion());
-                historyEntity.setResourceTextVc(resourceText);
-                historyEntity.setEncoding(ResourceEncodingEnum.JSON);
-                changed = true;
+                if (myResourceHistoryCalculator.conditionallyAlterHistoryEntity(entity, historyEntity, resourceText)) {
+                    changed = true;
+                }
             }
         }
         if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) {

@@ -24,6 +24,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
 class EncodedResource {

     private boolean myChanged;
+    private byte[] myResource;
     private ResourceEncodingEnum myEncoding;
     private String myResourceText;

@@ -35,6 +36,14 @@ class EncodedResource {
         myEncoding = theEncoding;
     }

+    public byte[] getResourceBinary() {
+        return myResource;
+    }
+
+    public void setResourceBinary(byte[] theResource) {
+        myResource = theResource;
+    }
+
     public boolean isChanged() {
         return myChanged;
     }

@@ -0,0 +1,153 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.dao;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
+import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
+import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.parser.IParser;
+import com.google.common.hash.HashCode;
+import com.google.common.hash.HashFunction;
+import com.google.common.hash.Hashing;
+import jakarta.annotation.Nonnull;
+import jakarta.annotation.Nullable;
+import org.apache.commons.lang3.StringUtils;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Responsible for various resource history-centric and {@link FhirContext} aware operations called by
+ * {@link BaseHapiFhirDao} or {@link BaseHapiFhirResourceDao} that require knowledge of whether an Oracle database is
+ * being used.
+ */
+public class ResourceHistoryCalculator {
+    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceHistoryCalculator.class);
+    private static final HashFunction SHA_256 = Hashing.sha256();
+
+    private final FhirContext myFhirContext;
+    private final boolean myIsOracleDialect;
+
+    public ResourceHistoryCalculator(FhirContext theFhirContext, boolean theIsOracleDialect) {
+        myFhirContext = theFhirContext;
+        myIsOracleDialect = theIsOracleDialect;
+    }
+
+    ResourceHistoryState calculateResourceHistoryState(
+            IBaseResource theResource, ResourceEncodingEnum theEncoding, List<String> theExcludeElements) {
+        final String encodedResource = encodeResource(theResource, theEncoding, theExcludeElements);
+        final byte[] resourceBinary;
+        final String resourceText;
+        final ResourceEncodingEnum encoding;
+        final HashCode hashCode;
+
+        if (myIsOracleDialect) {
+            resourceText = null;
+            resourceBinary = getResourceBinary(theEncoding, encodedResource);
+            encoding = theEncoding;
+            hashCode = SHA_256.hashBytes(resourceBinary);
+        } else {
+            resourceText = encodedResource;
+            resourceBinary = null;
+            encoding = ResourceEncodingEnum.JSON;
+            hashCode = SHA_256.hashUnencodedChars(encodedResource);
+        }
+
+        return new ResourceHistoryState(resourceText, resourceBinary, encoding, hashCode);
+    }
+
+    boolean conditionallyAlterHistoryEntity(
+            ResourceTable theEntity, ResourceHistoryTable theHistoryEntity, String theResourceText) {
+        if (!myIsOracleDialect) {
+            ourLog.debug(
+                    "Storing text of resource {} version {} as inline VARCHAR",
+                    theEntity.getResourceId(),
+                    theHistoryEntity.getVersion());
+            theHistoryEntity.setResourceTextVc(theResourceText);
+            theHistoryEntity.setResource(null);
+            theHistoryEntity.setEncoding(ResourceEncodingEnum.JSON);
+            return true;
+        }
+
+        return false;
+    }
+
+    boolean isResourceHistoryChanged(
+            ResourceHistoryTable theCurrentHistoryVersion,
+            @Nullable byte[] theResourceBinary,
+            @Nullable String resourceText) {
+        if (myIsOracleDialect) {
+            return !Arrays.equals(theCurrentHistoryVersion.getResource(), theResourceBinary);
+        }
+
+        return !StringUtils.equals(theCurrentHistoryVersion.getResourceTextVc(), resourceText);
+    }
+
+    String encodeResource(
+            IBaseResource theResource, ResourceEncodingEnum theEncoding, List<String> theExcludeElements) {
+        final IParser parser = theEncoding.newParser(myFhirContext);
+        parser.setDontEncodeElements(theExcludeElements);
+        return parser.encodeResourceToString(theResource);
+    }
+
+    /**
+     * Helper for returning the encoded byte array of the input resource string based on theEncoding.
+     *
+     * @param theEncoding the encoding to use
+     * @param theEncodedResource the resource to encode
+     * @return byte array of the resource
+     */
+    @Nonnull
+    static byte[] getResourceBinary(ResourceEncodingEnum theEncoding, String theEncodedResource) {
+        switch (theEncoding) {
+            case JSON:
+                return theEncodedResource.getBytes(StandardCharsets.UTF_8);
+            case JSONC:
+                return GZipUtil.compress(theEncodedResource);
+            default:
+                return new byte[0];
+        }
+    }
+
+    void populateEncodedResource(
+            EncodedResource theEncodedResource,
+            String theEncodedResourceString,
+            @Nullable byte[] theResourceBinary,
+            ResourceEncodingEnum theEncoding) {
+        if (myIsOracleDialect) {
+            populateEncodedResourceInner(theEncodedResource, null, theResourceBinary, theEncoding);
+        } else {
+            populateEncodedResourceInner(theEncodedResource, theEncodedResourceString, null, ResourceEncodingEnum.JSON);
+        }
+    }
+
+    private void populateEncodedResourceInner(
+            EncodedResource encodedResource,
+            String encodedResourceString,
+            byte[] theResourceBinary,
+            ResourceEncodingEnum theEncoding) {
+        encodedResource.setResourceText(encodedResourceString);
+        encodedResource.setResourceBinary(theResourceBinary);
+        encodedResource.setEncoding(theEncoding);
+    }
+}

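To make the dialect split above concrete, here is a sketch of the two outcomes for the same input. It is callable as written only from within the ca.uhn.fhir.jpa.dao package, since calculateResourceHistoryState is package-private; in production the boolean comes from the JpaConfig bean added earlier in this diff:

    FhirContext ctx = FhirContext.forR4Cached();
    Patient patient = new Patient();

    // Oracle: text is null, the content rides in the (possibly gzipped) binary,
    // and the SHA-256 hash covers the bytes.
    ResourceHistoryState oracle = new ResourceHistoryCalculator(ctx, true)
            .calculateResourceHistoryState(patient, ResourceEncodingEnum.JSONC, List.of("id"));

    // Everything else: the binary is null, the content is inline JSON text,
    // and the hash covers the characters.
    ResourceHistoryState inline = new ResourceHistoryCalculator(ctx, false)
            .calculateResourceHistoryState(patient, ResourceEncodingEnum.JSONC, List.of("id"));
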
@@ -0,0 +1,105 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.dao;
+
+import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
+import com.google.common.hash.HashCode;
+import jakarta.annotation.Nullable;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import java.util.StringJoiner;
+
+/**
+ * POJO to contain the results of {@link ResourceHistoryCalculator#calculateResourceHistoryState(IBaseResource, ResourceEncodingEnum, List)}
+ */
+public class ResourceHistoryState {
+    @Nullable
+    private final String myResourceText;
+
+    @Nullable
+    private final byte[] myResourceBinary;
+
+    private final ResourceEncodingEnum myEncoding;
+    private final HashCode myHashCode;
+
+    public ResourceHistoryState(
+            @Nullable String theResourceText,
+            @Nullable byte[] theResourceBinary,
+            ResourceEncodingEnum theEncoding,
+            HashCode theHashCode) {
+        myResourceText = theResourceText;
+        myResourceBinary = theResourceBinary;
+        myEncoding = theEncoding;
+        myHashCode = theHashCode;
+    }
+
+    @Nullable
+    public String getResourceText() {
+        return myResourceText;
+    }
+
+    @Nullable
+    public byte[] getResourceBinary() {
+        return myResourceBinary;
+    }
+
+    public ResourceEncodingEnum getEncoding() {
+        return myEncoding;
+    }
+
+    public HashCode getHashCode() {
+        return myHashCode;
+    }
+
+    @Override
+    public boolean equals(Object theO) {
+        if (this == theO) {
+            return true;
+        }
+        if (theO == null || getClass() != theO.getClass()) {
+            return false;
+        }
+        ResourceHistoryState that = (ResourceHistoryState) theO;
+        return Objects.equals(myResourceText, that.myResourceText)
+                && Arrays.equals(myResourceBinary, that.myResourceBinary)
+                && myEncoding == that.myEncoding
+                && Objects.equals(myHashCode, that.myHashCode);
+    }
+
+    @Override
+    public int hashCode() {
+        int result = Objects.hash(myResourceText, myEncoding, myHashCode);
+        result = 31 * result + Arrays.hashCode(myResourceBinary);
+        return result;
+    }
+
+    @Override
+    public String toString() {
+        return new StringJoiner(", ", ResourceHistoryState.class.getSimpleName() + "[", "]")
+                .add("myResourceText='" + myResourceText + "'")
+                .add("myResourceBinary=" + Arrays.toString(myResourceBinary))
+                .add("myEncoding=" + myEncoding)
+                .add("myHashCode=" + myHashCode)
+                .toString();
+    }
+}

@@ -477,10 +477,19 @@ public class IdHelperService implements IIdHelperService<JpaPid> {

     @Override
     public Optional<String> translatePidIdToForcedIdWithCache(JpaPid theId) {
-        return myMemoryCacheService.get(
-                MemoryCacheService.CacheEnum.PID_TO_FORCED_ID,
-                theId.getId(),
-                pid -> myResourceTableDao.findById(pid).map(ResourceTable::asTypedFhirResourceId));
+        // do getIfPresent and then put to avoid doing I/O inside the cache.
+        Optional<String> forcedId =
+                myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theId.getId());
+
+        if (forcedId == null) {
+            // This is only called when we know the resource exists.
+            // So this optional is only empty when there is no hfj_forced_id table
+            // note: this is obsolete with the new fhir_id column, and will go away.
+            forcedId = myResourceTableDao.findById(theId.getId()).map(ResourceTable::asTypedFhirResourceId);
+            myMemoryCacheService.put(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theId.getId(), forcedId);
+        }
+
+        return forcedId;
     }

     private ListMultimap<String, String> organizeIdsByResourceType(Collection<IIdType> theIds) {

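The comment in the new code is the whole rationale: a loading get(key, mappingFunction) runs the mapping function inside the cache's compute machinery, so the findById round trip can block other writers to that cache entry. The reworked shape keeps I/O outside the cache. Sketched generically below, assuming a Caffeine-style cache; lookupFromDatabase is a hypothetical stand-in for the repository call:

    Optional<String> cached = cache.getIfPresent(key); // lock-free read; null on a miss
    if (cached == null) {
        Optional<String> loaded = lookupFromDatabase(key); // I/O outside any cache lock
        cache.put(key, loaded);
        cached = loaded;
    }
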
@@ -133,10 +133,12 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
         mdmLinkTable
                 .addIndex("20230911.1", "IDX_EMPI_TGT_MR_LS")
                 .unique(false)
+                .online(true)
                 .withColumns("TARGET_TYPE", "MATCH_RESULT", "LINK_SOURCE");
         mdmLinkTable
                 .addIndex("20230911.2", "IDX_EMPi_TGT_MR_SCore")
                 .unique(false)
+                .online(true)
                 .withColumns("TARGET_TYPE", "MATCH_RESULT", "SCORE");

         // Move forced_id constraints to hfj_resource and the new fhir_id column

@@ -166,7 +168,11 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
                 .withColumns("RES_TYPE", "FHIR_ID");

         // For resolving references that don't supply the type.
-        hfjResource.addIndex("20231027.3", "IDX_RES_FHIR_ID").unique(false).withColumns("FHIR_ID");
+        hfjResource
+                .addIndex("20231027.3", "IDX_RES_FHIR_ID")
+                .unique(false)
+                .online(true)
+                .withColumns("FHIR_ID");

         Builder.BuilderWithTableName batch2JobInstanceTable = version.onTable("BT2_JOB_INSTANCE");

@@ -177,25 +183,32 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
         {
             version.executeRawSql(
                             "20231212.1",
-                            "CREATE INDEX idx_sp_string_hash_nrm_pattern_ops ON public.hfj_spidx_string USING btree (hash_norm_prefix, sp_value_normalized varchar_pattern_ops, res_id, partition_id)")
+                            "CREATE INDEX CONCURRENTLY idx_sp_string_hash_nrm_pattern_ops ON public.hfj_spidx_string USING btree (hash_norm_prefix, sp_value_normalized varchar_pattern_ops, res_id, partition_id)")
+                    .setTransactional(false)
                     .onlyAppliesToPlatforms(DriverTypeEnum.POSTGRES_9_4)
                     .onlyIf(
                             String.format(
                                     QUERY_FOR_COLUMN_COLLATION_TEMPLATE,
                                     "HFJ_SPIDX_STRING".toLowerCase(),
                                     "SP_VALUE_NORMALIZED".toLowerCase()),
-                            "Column HFJ_SPIDX_STRING.SP_VALUE_NORMALIZED already has a collation of 'C' so doing nothing");
-
+                            "Column HFJ_SPIDX_STRING.SP_VALUE_NORMALIZED already has a collation of 'C' so doing nothing")
+                    .onlyIf(
+                            "SELECT NOT EXISTS(select 1 from pg_indexes where indexname='idx_sp_string_hash_nrm_pattern_ops')",
+                            "Index idx_sp_string_hash_nrm_pattern_ops already exists");
             version.executeRawSql(
                             "20231212.2",
-                            "CREATE UNIQUE INDEX idx_sp_uri_hash_identity_pattern_ops ON public.hfj_spidx_uri USING btree (hash_identity, sp_uri varchar_pattern_ops, res_id, partition_id)")
+                            "CREATE UNIQUE INDEX CONCURRENTLY idx_sp_uri_hash_identity_pattern_ops ON public.hfj_spidx_uri USING btree (hash_identity, sp_uri varchar_pattern_ops, res_id, partition_id)")
+                    .setTransactional(false)
                     .onlyAppliesToPlatforms(DriverTypeEnum.POSTGRES_9_4)
                     .onlyIf(
                             String.format(
                                     QUERY_FOR_COLUMN_COLLATION_TEMPLATE,
                                     "HFJ_SPIDX_URI".toLowerCase(),
                                     "SP_URI".toLowerCase()),
-                            "Column HFJ_SPIDX_STRING.SP_VALUE_NORMALIZED already has a collation of 'C' so doing nothing");
+                            "Column HFJ_SPIDX_STRING.SP_VALUE_NORMALIZED already has a collation of 'C' so doing nothing")
+                    .onlyIf(
+                            "SELECT NOT EXISTS(select 1 from pg_indexes where indexname='idx_sp_uri_hash_identity_pattern_ops')",
+                            "Index idx_sp_uri_hash_identity_pattern_ops already exists.");
         }

         // This fix was bad for MSSQL, it has been set to do nothing.

@@ -622,6 +635,9 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
         version.executeRawSqls("20230402.1", Map.of(DriverTypeEnum.POSTGRES_9_4, postgresTuningStatements));

         // Use an unlimited length text column for RES_TEXT_VC
+        // N.B. This will FAIL SILENTLY on Oracle due to the fact that Oracle does not support an ALTER TABLE from
+        // VARCHAR to
+        // CLOB. Because of failureAllowed() this won't halt the migration
         version.onTable("HFJ_RES_VER")
                 .modifyColumn("20230421.1", "RES_TEXT_VC")
                 .nullable()

@@ -2466,7 +2466,7 @@ public class QueryStack {
                     theRequestPartitionId,
                     andPredicates,
                     nextAnd)) {
-                break;
+                continue;
             }

             EmbeddedChainedSearchModeEnum embeddedChainedSearchModeEnum =

@@ -649,7 +649,13 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
                 .getMessage(TermReadSvcImpl.class, "valueSetExpandedUsingPreExpansion", expansionTimestamp);
         theAccumulator.addMessage(msg);
         expandConcepts(
-                theExpansionOptions, theAccumulator, termValueSet, theFilter, theAdd, theAddedCodes, isOracleDialect());
+                theExpansionOptions,
+                theAccumulator,
+                termValueSet,
+                theFilter,
+                theAdd,
+                theAddedCodes,
+                myHibernatePropertiesProvider.isOracleDialect());
     }

     @Nonnull

@@ -664,10 +670,6 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
         return expansionTimestamp;
     }

-    private boolean isOracleDialect() {
-        return myHibernatePropertiesProvider.getDialect() instanceof org.hibernate.dialect.OracleDialect;
-    }
-
     private void expandConcepts(
             ValueSetExpansionOptions theExpansionOptions,
             IValueSetConceptAccumulator theAccumulator,

@@ -1596,6 +1598,16 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
         TermConcept code = findCodeForFilterCriteria(theSystem, theFilter);

+        if (theFilter.getOp() == ValueSet.FilterOperator.ISA) {
+            ourLog.debug(
+                    " * Filtering on specific code and codes with a parent of {}/{}/{}",
+                    code.getId(),
+                    code.getCode(),
+                    code.getDisplay());
+
+            b.must(f.bool()
+                    .should(f.match().field("myParentPids").matching("" + code.getId()))
+                    .should(f.match().field("myId").matching(code.getId())));
+        } else if (theFilter.getOp() == ValueSet.FilterOperator.DESCENDENTOF) {
             ourLog.debug(
                     " * Filtering on codes with a parent of {}/{}/{}", code.getId(), code.getCode(), code.getDisplay());

@ -0,0 +1,326 @@
|
|||
package ca.uhn.fhir.jpa.dao;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import com.google.common.hash.HashCode;
|
||||
import com.google.common.hash.HashFunction;
|
||||
import com.google.common.hash.Hashing;
|
||||
import org.hl7.fhir.dstu3.hapi.ctx.FhirDstu3;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.r4.hapi.ctx.FhirR4;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.Arguments;
|
||||
import org.junit.jupiter.params.provider.MethodSource;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.time.LocalDate;
|
||||
import java.time.Month;
|
||||
import java.time.ZoneId;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
class ResourceHistoryCalculatorTest {
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceHistoryCalculatorTest.class);
|
||||
|
||||
private static final FhirContext CONTEXT = FhirContext.forR4Cached();
|
||||
|
||||
private static final ResourceHistoryCalculator CALCULATOR_ORACLE = new ResourceHistoryCalculator(CONTEXT, true);
|
||||
private static final ResourceHistoryCalculator CALCULATOR_NON_ORACLE = new ResourceHistoryCalculator(CONTEXT, false);
|
||||
|
||||
private static final LocalDate TODAY = LocalDate.of(2024, Month.JANUARY, 25);
|
||||
private static final String ENCODED_RESOURCE_1 = "1234";
|
||||
private static final String ENCODED_RESOURCE_2 = "abcd";
|
||||
private static final String RESOURCE_TEXT_VC = "resourceTextVc";
|
||||
private static final List<String> EXCLUDED_ELEMENTS_1 = List.of("id");
|
||||
private static final List<String> EXCLUDED_ELEMENTS_2 = List.of("resourceType", "birthDate");
|
||||
private static final HashFunction SHA_256 = Hashing.sha256();
|
||||
|
||||
private static Stream<Arguments> calculateResourceHistoryStateArguments() {
|
||||
return Stream.of(
|
||||
Arguments.of(FhirContext.forDstu3Cached(), true, ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), false, ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), true, ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), false, ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), true, ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), false, ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), true, ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), false, ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forR4Cached(), true, ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forR4Cached(), false, ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forR4Cached(), true, ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forR4Cached(), false, ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forR4Cached(), true, ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forR4Cached(), false, ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forR4Cached(), true, ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forR4Cached(), false, ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_1)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* The purpose of this test is to ensure that the conditional logic to pre-calculate resource history text or binaries
|
||||
* is respected.
|
||||
* If this is for Oracle, the resource text will be driven off a binary with a given encoding with the
|
||||
* resource text effectively ignored.
|
||||
* If this is not Oracle, it will be driven off a JSON encoded text field with
|
||||
* the binary effectively ignored.
|
||||
*/
|
||||
@ParameterizedTest
|
||||
@MethodSource("calculateResourceHistoryStateArguments")
void calculateResourceHistoryState(FhirContext theFhirContext, boolean theIsOracle, ResourceEncodingEnum theResourceEncoding, List<String> theExcludedElements) {
    final IBaseResource patient = getPatient(theFhirContext);

    final ResourceHistoryCalculator calculator = getCalculator(theFhirContext, theIsOracle);
    final ResourceHistoryState result = calculator.calculateResourceHistoryState(patient, theResourceEncoding, theExcludedElements);

    if (theIsOracle) {
        assertNotNull(result.getResourceBinary()); // On Oracle: We use the resource binary to serve up the resource content
        assertNull(result.getResourceText()); // On Oracle: We do NOT use the resource text to serve up the resource content
        assertEquals(theResourceEncoding, result.getEncoding()); // On Oracle, the resource encoding is what we used to encode the binary
        assertEquals(SHA_256.hashBytes(result.getResourceBinary()), result.getHashCode()); // On Oracle, the SHA 256 hash is of the binary
    } else {
        assertNull(result.getResourceBinary()); // Non-Oracle: We do NOT use the resource binary to serve up the resource content
        assertNotNull(result.getResourceText()); // Non-Oracle: We use the resource text to serve up the resource content
        assertEquals(ResourceEncodingEnum.JSON, result.getEncoding()); // Non-Oracle, since we didn't encode a binary this is always JSON.
        final HashCode expectedHashCode = SHA_256.hashUnencodedChars(calculator.encodeResource(patient, theResourceEncoding, theExcludedElements)); // Non-Oracle, the SHA 256 hash is of the parsed resource object
        assertEquals(expectedHashCode, result.getHashCode());
    }
}

private static Stream<Arguments> conditionallyAlterHistoryEntityArguments() {
    return Stream.of(
        Arguments.of(true, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
        Arguments.of(true, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
        Arguments.of(true, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
        Arguments.of(true, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
        Arguments.of(true, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
        Arguments.of(true, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
        Arguments.of(true, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1),
        Arguments.of(true, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2),
        Arguments.of(false, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
        Arguments.of(false, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
        Arguments.of(false, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
        Arguments.of(false, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
        Arguments.of(false, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
        Arguments.of(false, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
        Arguments.of(false, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1),
        Arguments.of(false, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2)
    );
}

@ParameterizedTest
@MethodSource("conditionallyAlterHistoryEntityArguments")
void conditionallyAlterHistoryEntity_usesVarcharForOracle(boolean theIsOracle, ResourceEncodingEnum theResourceEncoding, String theResourceText) {
    final ResourceTable resourceTable = new ResourceTable();
    resourceTable.setId(123L);

    final ResourceHistoryTable resourceHistoryTable = new ResourceHistoryTable();
    resourceHistoryTable.setVersion(1);
    resourceHistoryTable.setResource("resource".getBytes(StandardCharsets.UTF_8));
    resourceHistoryTable.setEncoding(theResourceEncoding);
    resourceHistoryTable.setResourceTextVc(RESOURCE_TEXT_VC);

    final boolean isChanged =
        getCalculator(theIsOracle).conditionallyAlterHistoryEntity(resourceTable, resourceHistoryTable, theResourceText);
    if (theIsOracle) {
        assertFalse(isChanged);
        assertNotNull(resourceHistoryTable.getResource());
        assertEquals(RESOURCE_TEXT_VC, resourceHistoryTable.getResourceTextVc());
        assertEquals(theResourceEncoding, resourceHistoryTable.getEncoding());
    } else {
        assertTrue(isChanged);
        assertNull(resourceHistoryTable.getResource());
        assertEquals(theResourceText, resourceHistoryTable.getResourceTextVc());
        assertEquals(ResourceEncodingEnum.JSON, resourceHistoryTable.getEncoding());
    }
}

private static Stream<Arguments> encodeResourceArguments() {
    return Stream.of(
        Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_1),
        Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_2),
        Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_1),
        Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_2),
        Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_1),
        Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_2),
        Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_1),
        Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_2),
        Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_1),
        Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_2),
        Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_1),
        Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_2),
        Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_1),
        Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_2),
        Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_1),
        Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_2)
    );
}

@ParameterizedTest
@MethodSource("encodeResourceArguments")
void encodeResource_ensureFhirVersionSpecificAndIntendedElementsExcluded(FhirContext theFhirContext, ResourceEncodingEnum theResourceEncoding, List<String> theExcludedElements) {
    final IBaseResource patient = getPatient(theFhirContext);
    final String encodedResource = getCalculator(theFhirContext, true).encodeResource(patient, theResourceEncoding, theExcludedElements);

    final String expectedEncoding =
        theResourceEncoding.newParser(theFhirContext).setDontEncodeElements(theExcludedElements).encodeResourceToString(patient);

    assertEquals(expectedEncoding, encodedResource);
}

private static Stream<Arguments> getResourceBinaryArguments() {
    return Stream.of(
        Arguments.of(ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
        Arguments.of(ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
        Arguments.of(ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
        Arguments.of(ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
        Arguments.of(ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
        Arguments.of(ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
        Arguments.of(ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1),
        Arguments.of(ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2)
    );
}

@ParameterizedTest
@MethodSource("getResourceBinaryArguments")
void getResourceBinary(ResourceEncodingEnum theResourceEncoding, String theEncodedResource) {
    final byte[] resourceBinary = ResourceHistoryCalculator.getResourceBinary(theResourceEncoding, theEncodedResource);

    switch (theResourceEncoding) {
        case JSON:
            assertArrayEquals(theEncodedResource.getBytes(StandardCharsets.UTF_8), resourceBinary);
            break;
        case JSONC:
            assertArrayEquals(GZipUtil.compress(theEncodedResource), resourceBinary);
            break;
        case DEL:
        case ESR:
        default:
            assertArrayEquals(new byte[0], resourceBinary);
    }

    ourLog.info("resourceBinary: {}", resourceBinary);
}

private static Stream<Arguments> isResourceHistoryChangedArguments() {
    return Stream.of(
        Arguments.of(true, ENCODED_RESOURCE_1.getBytes(StandardCharsets.UTF_8), ENCODED_RESOURCE_1),
        Arguments.of(false, ENCODED_RESOURCE_1.getBytes(StandardCharsets.UTF_8), ENCODED_RESOURCE_1),
        Arguments.of(true, ENCODED_RESOURCE_2.getBytes(StandardCharsets.UTF_8), ENCODED_RESOURCE_2),
        Arguments.of(false, ENCODED_RESOURCE_2.getBytes(StandardCharsets.UTF_8), ENCODED_RESOURCE_2)
    );
}

@ParameterizedTest
@MethodSource("isResourceHistoryChangedArguments")
void isResourceHistoryChanged(boolean theIsOracle, byte[] theNewBinary, String theNewResourceText) {
    final String existingResourceText = ENCODED_RESOURCE_1;
    final byte[] existingBytes = existingResourceText.getBytes(StandardCharsets.UTF_8);

    final ResourceHistoryTable resourceHistoryTable = new ResourceHistoryTable();
    resourceHistoryTable.setResource(existingBytes);
    resourceHistoryTable.setResourceTextVc(existingResourceText);

    final boolean isChanged = getCalculator(theIsOracle).isResourceHistoryChanged(resourceHistoryTable, theNewBinary, theNewResourceText);

    if (theIsOracle) {
        final boolean expectedResult = !Arrays.equals(existingBytes, theNewBinary);
        assertEquals(expectedResult, isChanged);
    } else {
        final boolean expectedResult = !existingResourceText.equals(theNewResourceText);
        assertEquals(expectedResult, isChanged);
    }
}

private static Stream<Arguments> populateEncodedResourceArguments() {
    return Stream.of(
        Arguments.of(true, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
        Arguments.of(false, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
        Arguments.of(true, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
        Arguments.of(false, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
        Arguments.of(true, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
        Arguments.of(false, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
        Arguments.of(true, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2),
        Arguments.of(false, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1),
        Arguments.of(true, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
        Arguments.of(false, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
        Arguments.of(true, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
        Arguments.of(false, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
        Arguments.of(true, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
        Arguments.of(false, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
        Arguments.of(true, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2),
        Arguments.of(false, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1)
    );
}

@ParameterizedTest
@MethodSource("populateEncodedResourceArguments")
void populateEncodedResource(boolean theIsOracle, ResourceEncodingEnum theResourceEncoding, String theEncodedResourceString) {
    final EncodedResource encodedResource = new EncodedResource();
    final byte[] resourceBinary = theEncodedResourceString.getBytes(StandardCharsets.UTF_8);

    getCalculator(theIsOracle)
        .populateEncodedResource(encodedResource, theEncodedResourceString, resourceBinary, theResourceEncoding);

    if (theIsOracle) {
        assertEquals(resourceBinary, encodedResource.getResourceBinary());
        assertNull(encodedResource.getResourceText());
        assertEquals(theResourceEncoding, encodedResource.getEncoding());
    } else {
        assertNull(encodedResource.getResourceBinary());
        assertEquals(theEncodedResourceString, encodedResource.getResourceText());
        assertEquals(ResourceEncodingEnum.JSON, encodedResource.getEncoding());
    }
}

private ResourceHistoryCalculator getCalculator(boolean theIsOracle) {
    return theIsOracle ? CALCULATOR_ORACLE : CALCULATOR_NON_ORACLE;
}

private ResourceHistoryCalculator getCalculator(FhirContext theFhirContext, boolean theIsOracle) {
    return new ResourceHistoryCalculator(theFhirContext, theIsOracle);
}

private IBaseResource getPatient(FhirContext theFhirContext) {
    if (theFhirContext.getVersion() instanceof FhirR4) {
        return getPatientR4();
    }

    if (theFhirContext.getVersion() instanceof FhirDstu3) {
        return getPatientDstu3();
    }

    return null;
}

private org.hl7.fhir.dstu3.model.Patient getPatientDstu3() {
    final org.hl7.fhir.dstu3.model.Patient patient = new org.hl7.fhir.dstu3.model.Patient();

    patient.setId("123");
    patient.setBirthDate(Date.from(TODAY.atStartOfDay(ZoneId.of("America/Toronto")).toInstant()));

    return patient;
}

private Patient getPatientR4() {
    final Patient patient = new Patient();

    patient.setId("123");
    patient.setBirthDate(Date.from(TODAY.atStartOfDay(ZoneId.of("America/Toronto")).toInstant()));

    return patient;
}
}
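The argument providers above enumerate every (Oracle flag, encoding, resource) combination by hand. A minimal sketch of deriving the same matrix as a cartesian product; this is illustrative only and not part of the commit, and it assumes the same ENCODED_RESOURCE_1/2 constants and JUnit 5 params API used by the test class above.

// Editor's illustrative sketch, not part of this changeset.
// Generates the same 16-row matrix as conditionallyAlterHistoryEntityArguments()
// without hand-enumerating each combination.
private static Stream<Arguments> crossProductArguments() {
    return Stream.of(true, false)
        .flatMap(isOracle -> Stream.of(ResourceEncodingEnum.values())
            .flatMap(encoding -> Stream.of(ENCODED_RESOURCE_1, ENCODED_RESOURCE_2)
                .map(resource -> Arguments.of(isOracle, encoding, resource))));
}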
@@ -786,7 +786,7 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
    logAndValidateValueSet(result);

    ArrayList<String> codes = toCodesContains(result.getExpansion().getContains());
    assertThat(codes, containsInAnyOrder("childAAA", "childAAB"));
    assertThat(codes, containsInAnyOrder("childAA", "childAAA", "childAAB"));
}

@Test
@@ -32,8 +32,6 @@ import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;

import static org.apache.commons.lang3.StringUtils.defaultString;

@Entity
@Table(
    name = ResourceHistoryTable.HFJ_RES_VER,

@@ -86,15 +84,12 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
@OneToMany(mappedBy = "myResourceHistory", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
private Collection<ResourceHistoryTag> myTags;

/**
 * Note: No setter for this field because it's only a legacy way of storing data now.
 */
@Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = true)
@Lob()
@OptimisticLock(excluded = true)
private byte[] myResource;

@Column(name = "RES_TEXT_VC", nullable = true, length = Length.LONG32)
@Column(name = "RES_TEXT_VC", length = Length.LONG32, nullable = true)
@OptimisticLock(excluded = true)
private String myResourceTextVc;

@@ -155,8 +150,7 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
}

public void setResourceTextVc(String theResourceTextVc) {
    myResource = null;
    myResourceTextVc = defaultString(theResourceTextVc);
    myResourceTextVc = theResourceTextVc;
}

public ResourceHistoryProvenanceEntity getProvenance() {

@@ -212,6 +206,10 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
    return myResource;
}

public void setResource(byte[] theResource) {
    myResource = theResource;
}

@Override
public Long getResourceId() {
    return myResourceId;
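These entity changes keep the legacy RES_TEXT LOB column alongside the newer RES_TEXT_VC varchar column, and the new setResource(byte[]) setter lets binary-based writers keep working. A minimal sketch of how a reader might cope with rows written under either scheme follows; it is a hypothetical helper, not code from this commit, and it assumes JSONC means "gzipped bytes in RES_TEXT" as exercised by ResourceHistoryCalculatorTest above (GZipUtil.decompress is assumed to be the inverse of the GZipUtil.compress call seen there).

// Editor's illustrative sketch, not part of this changeset.
// Resolves the resource body for a history row that may predate the
// RES_TEXT_VC migration: prefer the varchar column, fall back to the LOB.
static String resolveResourceText(ResourceHistoryTable theEntity) {
    if (theEntity.getResourceTextVc() != null) {
        return theEntity.getResourceTextVc(); // newer rows: plain text in RES_TEXT_VC
    }
    byte[] legacy = theEntity.getResource(); // legacy rows: bytes in RES_TEXT
    if (legacy == null) {
        return null;
    }
    return theEntity.getEncoding() == ResourceEncodingEnum.JSONC
        ? GZipUtil.decompress(legacy) // assumption: gzipped JSON
        : new String(legacy, StandardCharsets.UTF_8);
}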
@@ -116,7 +116,7 @@ public class ReadPartitionIdRequestDetails extends PartitionIdRequestDetails {
} else if (theResourceType != null) {
    op = RestOperationTypeEnum.EXTENDED_OPERATION_TYPE;
} else {
    op = RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE;
    op = RestOperationTypeEnum.EXTENDED_OPERATION_SERVER;
}

return new ReadPartitionIdRequestDetails(theResourceType, op, null, null, null, null, theExtendedOperationName);
@@ -313,7 +313,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
    .setSystem(codeSystem.getUrl())
    .addFilter()
    .setProperty("concept")
    .setOp(FilterOperator.ISA)
    .setOp(FilterOperator.DESCENDENTOF)
    .setValue("dogs");

myValueSetDao.create(valueSet, mySrd);

@@ -504,7 +504,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
logAndValidateValueSet(result);

ArrayList<String> codes = toCodesContains(result.getExpansion().getContains());
assertThat(codes, containsInAnyOrder("childAAA", "childAAB"));
assertThat(codes, containsInAnyOrder("childAA", "childAAA", "childAAB"));
}

@@ -535,7 +535,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
logAndValidateValueSet(result);

ArrayList<String> codes = toCodesContains(result.getExpansion().getContains());
assertThat(codes, containsInAnyOrder("childAAA", "childAAB"));
assertThat(codes, containsInAnyOrder("childAA", "childAAA", "childAAB"));
}

@@ -650,7 +650,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
ValueSet vs = new ValueSet();
ConceptSetComponent include = vs.getCompose().addInclude();
include.setSystem(URL_MY_CODE_SYSTEM);
include.addFilter().setProperty("concept").setOp(FilterOperator.ISA).setValue("ParentA");
include.addFilter().setProperty("concept").setOp(FilterOperator.DESCENDENTOF).setValue("ParentA");

ValueSet result = myValueSetDao.expand(vs, null);
logAndValidateValueSet(result);

@@ -669,7 +669,7 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
vs = new ValueSet();
include = vs.getCompose().addInclude();
include.setSystem(URL_MY_CODE_SYSTEM);
include.addFilter().setProperty("concept").setOp(FilterOperator.ISA).setValue("ParentA");
include.addFilter().setProperty("concept").setOp(FilterOperator.DESCENDENTOF).setValue("ParentA");
result = myValueSetDao.expand(vs, null);
logAndValidateValueSet(result);
|
@ -230,7 +230,7 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv
|
|||
ConceptSetComponent include = myLocalVs.getCompose().addInclude();
|
||||
include.setSystem(theCodeSystemUrl);
|
||||
include.setVersion(theValueSetVersion);
|
||||
include.addFilter().setProperty("concept").setOp(FilterOperator.ISA).setValue("ParentA");
|
||||
include.addFilter().setProperty("concept").setOp(FilterOperator.DESCENDENTOF).setValue("ParentA");
|
||||
return myLocalVs;
|
||||
|
||||
}
|
||||
|
|
|
@@ -1,11 +1,16 @@
package ca.uhn.fhir.validator;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.primitive.CodeDt;
import ca.uhn.fhir.util.AttachmentUtil;
import org.hl7.fhir.instance.model.api.ICompositeType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.Attachment;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;

public class AttachmentUtilTest {

@@ -53,4 +58,18 @@ public class AttachmentUtilTest {
    String encoded = ctx.newJsonParser().encodeResourceToString(communication);
    assertEquals("{\"resourceType\":\"Communication\",\"payload\":[{\"contentAttachment\":{\"contentType\":\"text/plain\",\"data\":\"AAECAw==\",\"url\":\"http://foo\",\"size\":123}}]}", encoded);
}

@Test
public void testGetOrCreateContentTypeOnEmptyAttachmentR4() {
    FhirContext ctx = FhirContext.forR4Cached();
    Attachment attachment = (Attachment) AttachmentUtil.newInstance(ctx);

    assertNull(attachment.getContentType());

    IPrimitiveType<CodeDt> contentType = AttachmentUtil.getOrCreateContentType(ctx, attachment);

    contentType.setValueAsString("text/plain");

    assertNotNull(attachment.getContentType());
}
}
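The new test exercises the version-independent AttachmentUtil API after its switch from a "string" child to FHIR's "code" datatype for Attachment.contentType. For readers wondering what that resolves to in the concrete R4 model, a small sketch; illustrative only, assuming the org.hl7.fhir.r4.model structures already imported above.

// Editor's illustrative sketch, not part of this changeset.
// In the concrete R4 model, Attachment.contentType is backed by a CodeType
// (FHIR "code"), which matches the "code" child definition the util now requests.
Attachment attachment = new Attachment();
attachment.getContentTypeElement().setValue("text/plain"); // CodeType, not StringType
assertEquals("text/plain", attachment.getContentType());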
@@ -87,6 +87,7 @@ import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;

@@ -299,9 +300,9 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
assertThat(result.getRequiresAccessToken(), is(equalTo(true)));
assertThat(result.getTransactionTime(), is(notNullValue()));
assertEquals(result.getOutput().size(), 3);
assertEquals(1, result.getOutput().stream().filter(o -> o.getType().equals("Patient")).collect(Collectors.toList()).size());
assertEquals(1, result.getOutput().stream().filter(o -> o.getType().equals("Observation")).collect(Collectors.toList()).size());
assertEquals(1, result.getOutput().stream().filter(o -> o.getType().equals("Encounter")).collect(Collectors.toList()).size());
assertEquals(1, result.getOutput().stream().filter(o -> o.getType().equals("Patient")).count());
assertEquals(1, result.getOutput().stream().filter(o -> o.getType().equals("Observation")).count());
assertEquals(1, result.getOutput().stream().filter(o -> o.getType().equals("Encounter")).count());

//We assert specifically on content as the deserialized version will "helpfully" fill in missing fields.
assertThat(responseContent, containsString("\"error\" : [ ]"));

@@ -338,8 +339,8 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
assertThat(result.getRequiresAccessToken(), is(equalTo(true)));
assertThat(result.getTransactionTime(), is(notNullValue()));
assertEquals(result.getOutput().size(), 1);
assertEquals(1, result.getOutput().stream().filter(o -> o.getType().equals("Patient")).collect(Collectors.toList()).size());
assertEquals(0, result.getOutput().stream().filter(o -> o.getType().equals("Binary")).collect(Collectors.toList()).size());
assertEquals(1, result.getOutput().stream().filter(o -> o.getType().equals("Patient")).count());
assertEquals(0, result.getOutput().stream().filter(o -> o.getType().equals("Binary")).count());

//We assert specifically on content as the deserialized version will "helpfully" fill in missing fields.
assertThat(responseContent, containsString("\"error\" : [ ]"));

@@ -381,7 +382,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
}

HashSet<String> types = Sets.newHashSet("Patient");
BulkExportJobResults bulkExportJobResults = startSystemBulkExportJobAndAwaitCompletion(types, new HashSet<String>());
BulkExportJobResults bulkExportJobResults = startSystemBulkExportJobAndAwaitCompletion(types, new HashSet<>());
Map<String, List<String>> resourceTypeToBinaryIds = bulkExportJobResults.getResourceTypeToBinaryIds();
assertThat(resourceTypeToBinaryIds.get("Patient"), hasSize(1));
String patientBinaryId = resourceTypeToBinaryIds.get("Patient").get(0);

@@ -477,7 +478,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
String entities = myJobInstanceRepository
    .findAll()
    .stream()
    .map(t -> t.toString())
    .map(Batch2JobInstanceEntity::toString)
    .collect(Collectors.joining("\n * "));
ourLog.info("Entities:\n * " + entities);
});

@@ -492,6 +493,41 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
    assertEquals(patientCount, jobInstance.getCombinedRecordsProcessed());
}

@Test
public void testEmptyExport() {
    BulkExportJobParameters options = new BulkExportJobParameters();
    options.setResourceTypes(Collections.singleton("Patient"));
    options.setFilters(Collections.emptySet());
    options.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
    options.setOutputFormat(Constants.CT_FHIR_NDJSON);

    JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
    startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
    startRequest.setParameters(options);
    Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(mySrd, startRequest);

    assertNotNull(startResponse);

    final String jobId = startResponse.getInstanceId();

    // Run a scheduled pass to build the export
    myBatch2JobHelper.awaitJobCompletion(startResponse.getInstanceId());
    runInTransaction(() -> {
        String entities = myJobInstanceRepository
            .findAll()
            .stream()
            .map(Batch2JobInstanceEntity::toString)
            .collect(Collectors.joining("\n * "));
        ourLog.info("Entities:\n * " + entities);
    });

    final Optional<JobInstance> optJobInstance = myJobPersistence.fetchInstance(jobId);
    assertNotNull(optJobInstance);
    assertTrue(optJobInstance.isPresent());
    assertThat(optJobInstance.get().getReport(),
        containsString("Export complete, but no data to generate report for job instance:"));
}

private void logContentTypeAndResponse(Header[] headers, String response) {
    ourLog.info("**************************");
    ourLog.info("Content-Type is: {}", headers[0]);

@@ -542,7 +578,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {

// test
HashSet<String> types = Sets.newHashSet("Patient", "Observation");
BulkExportJobResults bulkExportJobResults = startPatientBulkExportJobAndAwaitResults(types, new HashSet<String>(), "ha");
BulkExportJobResults bulkExportJobResults = startPatientBulkExportJobAndAwaitResults(types, new HashSet<>(), "ha");
Map<String, List<IBaseResource>> typeToResources = convertJobResultsToResources(bulkExportJobResults);
assertThat(typeToResources.get("Patient"), hasSize(1));
assertThat(typeToResources.get("Observation"), hasSize(1));

@@ -605,6 +641,34 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
        assertTrue(patientIds.contains(resourceId));
    }
}

@Test
public void testExportEmptyResult() {
    BulkExportJobParameters options = new BulkExportJobParameters();
    options.setResourceTypes(Sets.newHashSet("Patient"));
    options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
    options.setOutputFormat(Constants.CT_FHIR_NDJSON);

    JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
    startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
    startRequest.setParameters(options);
    Batch2JobStartResponse job = myJobCoordinator.startInstance(mySrd, startRequest);
    myBatch2JobHelper.awaitJobCompletion(job.getInstanceId(), 60);
    ourLog.debug("Job status after awaiting - {}", myJobCoordinator.getInstance(job.getInstanceId()).getStatus());
    await()
        .atMost(300, TimeUnit.SECONDS)
        .until(() -> {
            StatusEnum status = myJobCoordinator.getInstance(job.getInstanceId()).getStatus();
            if (!StatusEnum.COMPLETED.equals(status)) {
                fail("Job status was changed from COMPLETED to " + status);
            }
            return myJobCoordinator.getInstance(job.getInstanceId()).getReport() != null;
        });

    String report = myJobCoordinator.getInstance(job.getInstanceId()).getReport();
    assertThat(report,
        containsString("Export complete, but no data to generate report for job instance:"));
}
}

@@ -1081,7 +1145,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
            }
        }
    ]
}
}
""";
Bundle bundle = parser.parseResource(Bundle.class, bundleStr);
myClient.transaction().withBundle(bundle).execute();

@@ -1218,6 +1282,21 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
    assertThat(typeToContents.get("Patient"), not(containsString("POG2")));
}

@Test
public void testExportEmptyResult() {
    Group group = new Group();
    group.setId("Group/G-empty");
    group.setActive(true);
    myClient.update().resource(group).execute();

    HashSet<String> resourceTypes = Sets.newHashSet("Patient");
    BulkExportJobResults bulkExportJobResults = startGroupBulkExportJobAndAwaitCompletion(
        resourceTypes, new HashSet<>(), "G-empty");

    assertThat(bulkExportJobResults.getReportMsg(),
        startsWith("Export complete, but no data to generate report for job instance:"));
}

@Test
public void testGroupBulkExportMultipleResourceTypes() {
    Patient patient = new Patient();

@@ -1398,7 +1477,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
Map<String, List<IBaseResource>> convertJobResultsToResources(BulkExportJobResults theResults) {
    Map<String, String> stringStringMap = convertJobResultsToStringContents(theResults);
    Map<String, List<IBaseResource>> typeToResources = new HashMap<>();
    stringStringMap.entrySet().forEach(entry -> typeToResources.put(entry.getKey(), convertNDJSONToResources(entry.getValue())));
    stringStringMap.forEach((key, value) -> typeToResources.put(key, convertNDJSONToResources(value)));
    return typeToResources;
}

@@ -1412,8 +1491,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
private String getBinaryContentsAsString(String theBinaryId) {
    Binary binary = myBinaryDao.read(new IdType(theBinaryId));
    assertEquals(Constants.CT_FHIR_NDJSON, binary.getContentType());
    String contents = new String(binary.getContent(), Constants.CHARSET_UTF8);
    return contents;
    return new String(binary.getContent(), Constants.CHARSET_UTF8);
}

BulkExportJobResults startGroupBulkExportJobAndAwaitCompletion(HashSet<String> theResourceTypes, HashSet<String> theFilters, String theGroupId) {

@@ -1509,8 +1587,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
await().atMost(300, TimeUnit.SECONDS).until(() -> myJobCoordinator.getInstance(jobInstanceId).getReport() != null);

String report = myJobCoordinator.getInstance(jobInstanceId).getReport();
BulkExportJobResults results = JsonUtil.deserialize(report, BulkExportJobResults.class);
return results;
return JsonUtil.deserialize(report, BulkExportJobResults.class);
}

private void verifyBulkExportResults(String theGroupId, HashSet<String> theFilters, List<String> theContainedList, List<String> theExcludedList) {
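Both new empty-export tests wait for the job coordinator to publish a report before asserting on it, and the same Awaitility shape recurs in the existing helpers. A minimal sketch isolating that pattern; illustrative only, assuming the same myJobCoordinator field and Awaitility static import seen in this file.

// Editor's illustrative sketch, not part of this changeset.
// Poll until the Batch2 job instance has a report, then return it.
private String awaitReport(String theInstanceId) {
    await().atMost(300, TimeUnit.SECONDS)
        .until(() -> myJobCoordinator.getInstance(theInstanceId).getReport() != null);
    return myJobCoordinator.getInstance(theInstanceId).getReport();
}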
@@ -33,11 +33,13 @@ import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.data.domain.Pageable;

import java.util.ArrayList;
import java.util.Base64;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import static org.awaitility.Awaitility.await;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.blankOrNullString;
import static org.hamcrest.Matchers.containsString;

@@ -52,7 +54,9 @@ import static org.junit.jupiter.api.Assertions.fail;
public class BulkImportR4Test extends BaseJpaR4Test {

    private static final Logger ourLog = LoggerFactory.getLogger(BulkImportR4Test.class);
    private final BulkImportFileServlet myBulkImportFileServlet = new BulkImportFileServlet();
    private static final String USERNAME = "username";
    private static final String PASSWORD = "password";
    private final BulkImportFileServlet myBulkImportFileServlet = new BulkImportFileServlet(USERNAME, PASSWORD);
    @RegisterExtension
    private final HttpServletExtension myHttpServletExtension = new HttpServletExtension()
        .withServlet(myBulkImportFileServlet);

@@ -76,6 +80,45 @@ public class BulkImportR4Test extends BaseJpaR4Test {
    await().until(() -> channel.getQueueSizeForUnitTest() == 0);
}

@Test
public void testBulkImportFailsWith403OnBadCredentials() {
    BulkImportJobParameters parameters = new BulkImportJobParameters();
    String url = myHttpServletExtension.getBaseUrl() + "/download?index=test"; // Name doesn't matter; it's going to fail with 403 anyhow
    parameters.addNdJsonUrl(url);
    JobInstanceStartRequest request = new JobInstanceStartRequest();
    request.setJobDefinitionId(BulkImportAppCtx.JOB_BULK_IMPORT_PULL);
    request.setParameters(parameters);

    // Execute
    Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(request);
    String instanceId = startResponse.getInstanceId();
    assertThat(instanceId, not(blankOrNullString()));
    ourLog.info("Execution got ID: {}", instanceId);

    // Verify
    await().atMost(120, TimeUnit.SECONDS).until(() -> {
        myJobCleanerService.runMaintenancePass();
        JobInstance instance = myJobCoordinator.getInstance(instanceId);
        return instance.getStatus();
    }, equalTo(StatusEnum.FAILED));

    // No resources stored
    runInTransaction(() -> {
        assertEquals(0, myResourceTableDao.count());
    });

    // Should have 403
    runInTransaction(() -> {
        JobInstance instance = myJobCoordinator.getInstance(instanceId);
        ourLog.info("Instance details:\n{}", JsonUtil.serialize(instance, true));
        assertEquals(1, instance.getErrorCount());
        assertThat(instance.getErrorMessage(), is(containsString("Received HTTP 403")));
    });
}

@Test
public void testRunBulkImport() {
    // Setup

@@ -84,6 +127,8 @@ public class BulkImportR4Test extends BaseJpaR4Test {
List<String> indexes = addFiles(fileCount);

BulkImportJobParameters parameters = new BulkImportJobParameters();
parameters.setHttpBasicCredentials(USERNAME + ":" + PASSWORD);
for (String next : indexes) {
    String url = myHttpServletExtension.getBaseUrl() + "/download?index=" + next;
    parameters.addNdJsonUrl(url);

@@ -132,6 +177,7 @@ public class BulkImportR4Test extends BaseJpaR4Test {
List<String> indexes = addFiles(fileCount);

BulkImportJobParameters parameters = new BulkImportJobParameters();
parameters.setHttpBasicCredentials(USERNAME + ":" + PASSWORD);
for (String next : indexes) {
    String url = myHttpServletExtension.getBaseUrl() + "/download?index=" + next;
    parameters.addNdJsonUrl(url);

@@ -219,6 +265,7 @@ public class BulkImportR4Test extends BaseJpaR4Test {
indexes.add(myBulkImportFileServlet.registerFileByContents("{\"resourceType\":\"Foo\"}"));

BulkImportJobParameters parameters = new BulkImportJobParameters();
parameters.setHttpBasicCredentials(USERNAME + ":" + PASSWORD);
for (String next : indexes) {
    String url = myHttpServletExtension.getBaseUrl() + "/download?index=" + next;
    parameters.addNdJsonUrl(url);

@@ -260,6 +307,7 @@ public class BulkImportR4Test extends BaseJpaR4Test {
BulkImportJobParameters parameters = new BulkImportJobParameters();
String url = myHttpServletExtension.getBaseUrl() + "/download?index=FOO";
parameters.addNdJsonUrl(url);
parameters.setHttpBasicCredentials(USERNAME + ":" + PASSWORD);

JobInstanceStartRequest request = new JobInstanceStartRequest();
request.setJobDefinitionId(BulkImportAppCtx.JOB_BULK_IMPORT_PULL);

@@ -328,7 +376,9 @@ public class BulkImportR4Test extends BaseJpaR4Test {

JobInstanceStartRequest request = new JobInstanceStartRequest();
request.setJobDefinitionId(BulkImportAppCtx.JOB_BULK_IMPORT_PULL);
request.setParameters(new BulkImportJobParameters());
BulkImportJobParameters parameters = new BulkImportJobParameters();
parameters.setHttpBasicCredentials(USERNAME + ":" + PASSWORD);
request.setParameters(parameters);

// Execute
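The job parameters above carry credentials as a single "username:password" string via setHttpBasicCredentials. On the wire, such a pair conventionally becomes a Base64-encoded HTTP Basic Authorization header (RFC 7617); the sketch below shows that standard encoding with the JDK only, and makes no claim about how BulkImportFileServlet validates it internally.

// Editor's illustrative sketch, not part of this changeset.
import java.nio.charset.StandardCharsets;
import java.util.Base64;

class BasicAuthSketch {
    static String toBasicHeader(String theCredentials) { // e.g. "username:password"
        return "Basic " + Base64.getEncoder()
            .encodeToString(theCredentials.getBytes(StandardCharsets.UTF_8));
    }
    // toBasicHeader("username:password") -> "Basic dXNlcm5hbWU6cGFzc3dvcmQ="
}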
@@ -11,14 +11,18 @@ import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.util.SqlQuery;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.AuditEvent;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Composition;
import org.hl7.fhir.r4.model.Device;
import org.hl7.fhir.r4.model.DomainResource;
import org.hl7.fhir.r4.model.Encounter;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Location;
import org.hl7.fhir.r4.model.MessageHeader;

@@ -27,22 +31,26 @@ import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Quantity;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.SearchParameter;
import org.hl7.fhir.r4.model.StringType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.springframework.beans.factory.annotation.Autowired;

import java.io.IOException;
import java.sql.Date;
import java.util.ArrayList;
import java.util.List;

import static org.apache.commons.lang3.StringUtils.countMatches;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.in;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.fail;

@@ -1573,6 +1581,76 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
    countUnionStatementsInGeneratedQuery("/Observation?subject:Location.name=Smith", 1);
}

@ParameterizedTest
@CsvSource({
    // search url expected count
    "/Bundle?composition.patient.identifier=system|value-1&composition.patient.birthdate=1980-01-01, 1", // correct identifier, correct birthdate
    "/Bundle?composition.patient.birthdate=1980-01-01&composition.patient.identifier=system|value-1, 1", // correct birthdate, correct identifier
    "/Bundle?composition.patient.identifier=system|value-1&composition.patient.birthdate=2000-01-01, 0", // correct identifier, incorrect birthdate
    "/Bundle?composition.patient.birthdate=2000-01-01&composition.patient.identifier=system|value-1, 0", // incorrect birthdate, correct identifier
    "/Bundle?composition.patient.identifier=system|value-2&composition.patient.birthdate=1980-01-01, 0", // incorrect identifier, correct birthdate
    "/Bundle?composition.patient.birthdate=1980-01-01&composition.patient.identifier=system|value-2, 0", // correct birthdate, incorrect identifier
    "/Bundle?composition.patient.identifier=system|value-2&composition.patient.birthdate=2000-01-01, 0", // incorrect identifier, incorrect birthdate
    "/Bundle?composition.patient.birthdate=2000-01-01&composition.patient.identifier=system|value-2, 0", // incorrect birthdate, incorrect identifier
})
public void testMultipleChainedBundleCompositionSearchParameters(String theSearchUrl, int theExpectedCount) {
    createSearchParameter("bundle-composition-patient-birthdate",
        "composition.patient.birthdate",
        "Bundle",
        "Bundle.entry.resource.ofType(Patient).birthDate",
        Enumerations.SearchParamType.DATE
    );

    createSearchParameter("bundle-composition-patient-identifier",
        "composition.patient.identifier",
        "Bundle",
        "Bundle.entry.resource.ofType(Patient).identifier",
        Enumerations.SearchParamType.TOKEN
    );

    createDocumentBundleWithPatientDetails("1980-01-01", "system", "value-1");

    SearchParameterMap params = myMatchUrlService.getResourceSearch(theSearchUrl).getSearchParameterMap().setLoadSynchronous(true);
    assertSearchReturns(myBundleDao, params, theExpectedCount);
}

private void createSearchParameter(String theId, String theCode, String theBase, String theExpression, Enumerations.SearchParamType theType) {
    SearchParameter searchParameter = new SearchParameter();
    searchParameter.setId(theId);
    searchParameter.setCode(theCode);
    searchParameter.setName(theCode);
    searchParameter.setUrl("http://example.org/SearchParameter/" + theId);
    searchParameter.setStatus(Enumerations.PublicationStatus.ACTIVE);
    searchParameter.addBase(theBase);
    searchParameter.setType(theType);
    searchParameter.setExpression(theExpression);
    searchParameter = (SearchParameter) mySearchParameterDao.update(searchParameter, mySrd).getResource();
    mySearchParamRegistry.forceRefresh();
    assertNotNull(mySearchParamRegistry.getActiveSearchParam(theBase, searchParameter.getName()));
}

private void createDocumentBundleWithPatientDetails(String theBirthDate, String theIdentifierSystem, String theIdentifierValue) {
    Patient patient = new Patient();
    patient.setBirthDate(Date.valueOf(theBirthDate));
    patient.addIdentifier().setSystem(theIdentifierSystem).setValue(theIdentifierValue);
    patient = (Patient) myPatientDao.create(patient, mySrd).getResource();
    assertSearchReturns(myPatientDao, SearchParameterMap.newSynchronous(), 1);

    Bundle bundle = new Bundle();
    bundle.setType(Bundle.BundleType.DOCUMENT);
    Composition composition = new Composition();
    composition.setType(new CodeableConcept().addCoding(new Coding().setCode("code").setSystem("http://example.org")));
    bundle.addEntry().setResource(composition);
    composition.getSubject().setReference(patient.getIdElement().getValue());
    bundle.addEntry().setResource(patient);
    myBundleDao.create(bundle, mySrd);
    assertSearchReturns(myBundleDao, SearchParameterMap.newSynchronous(), 1);
}

private void assertSearchReturns(IFhirResourceDao<?> theDao, SearchParameterMap theSearchParams, int theExpectedCount) {
    assertEquals(theExpectedCount, theDao.search(theSearchParams, mySrd).size());
}

private void countUnionStatementsInGeneratedQuery(String theUrl, int theExpectedNumberOfUnions) throws IOException {
    myCaptureQueriesListener.clear();
    searchAndReturnUnqualifiedVersionlessIdValues(theUrl);
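The parameterized test above resolves each chained URL through myMatchUrlService into a SearchParameterMap and queries the DAO directly. Over REST, the same chain is just a GET against the server; a minimal sketch with the generic client follows. Illustrative only: the base URL is hypothetical, and it assumes the two custom SearchParameters created in the test are registered on the target server.

// Editor's illustrative sketch, not part of this changeset.
IGenericClient client = FhirContext.forR4Cached()
    .newRestfulGenericClient("http://localhost:8000/fhir"); // hypothetical base URL
Bundle results = client.search()
    .byUrl("Bundle?composition.patient.identifier=system|value-1"
        + "&composition.patient.birthdate=1980-01-01")
    .returnBundle(Bundle.class)
    .execute();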
@@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;

@@ -29,6 +30,7 @@ import ca.uhn.fhir.util.ClasspathUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.exparity.hamcrest.date.DateMatchers;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;

@@ -39,6 +41,7 @@ import org.hl7.fhir.r4.model.DecimalType;
import org.hl7.fhir.r4.model.Encounter;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Identifier;
import org.hl7.fhir.r4.model.InstantType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Organization;

@@ -48,8 +51,12 @@ import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.SampledData;
import org.hl7.fhir.r4.model.SearchParameter;
import org.hl7.fhir.r4.model.StructureDefinition;
import org.hl7.fhir.r4.model.Task;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.PageRequest;

@@ -60,6 +67,7 @@ import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

@@ -77,6 +85,7 @@ import static org.hamcrest.Matchers.matchesPattern;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

@@ -1224,4 +1233,116 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
    assertEquals(1, ids.size());
}

@Nested
class ConditionalCreates {
    private static final String SYSTEM = "http://tempuri.org";
    private static final String VALUE_1 = "1";
    private static final String VALUE_2 = "2";

    private final Task myTask1 = new Task()
        .setStatus(Task.TaskStatus.DRAFT)
        .setIntent(Task.TaskIntent.UNKNOWN)
        .addIdentifier(new Identifier()
            .setSystem(SYSTEM)
            .setValue(VALUE_1));

    private final Task myTask2 = new Task()
        .setStatus(Task.TaskStatus.DRAFT)
        .setIntent(Task.TaskIntent.UNKNOWN)
        .addIdentifier(new Identifier()
            .setSystem(SYSTEM)
            .setValue(VALUE_2))
        .addBasedOn(new Reference().setReference("urn:uuid:59cda086-4763-4ef0-8e36-8c90058686ea"));

    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void testConditionalCreateDependsOnPOSTedResource(boolean theHasQuestionMark) {
        final IFhirResourceDao<Task> taskDao = getTaskDao();
        taskDao.create(myTask1, new SystemRequestDetails());

        final List<Task> allTasksPreBundle = searchAllTasks();
        assertEquals(1, allTasksPreBundle.size());
        final Task taskPreBundle = allTasksPreBundle.get(0);
        assertEquals(VALUE_1, taskPreBundle.getIdentifier().get(0).getValue());
        assertEquals(SYSTEM, taskPreBundle.getIdentifier().get(0).getSystem());

        final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext);

        final String entryConditionalTemplate = "%sidentifier=http://tempuri.org|1";
        final String matchUrl = String.format(entryConditionalTemplate, theHasQuestionMark ? "?" : "");

        bundleBuilder.addTransactionCreateEntry(myTask2)
            .conditional(matchUrl);

        final List<Bundle.BundleEntryComponent> responseEntries = sendBundleAndGetResponse(bundleBuilder.getBundle());

        assertEquals(1, responseEntries.size());

        final Bundle.BundleEntryComponent bundleEntry = responseEntries.get(0);

        assertEquals("200 OK", bundleEntry.getResponse().getStatus());

        final List<Task> allTasksPostBundle = searchAllTasks();
        assertEquals(1, allTasksPostBundle.size());
        final Task taskPostBundle = allTasksPostBundle.get(0);
        assertEquals(VALUE_1, taskPostBundle.getIdentifier().get(0).getValue());
        assertEquals(SYSTEM, taskPostBundle.getIdentifier().get(0).getSystem());
    }

    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void testConditionalCreateDependsOnFirstEntryExisting(boolean theHasQuestionMark) {
        final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext);

        bundleBuilder.addTransactionCreateEntry(myTask1, "urn:uuid:59cda086-4763-4ef0-8e36-8c90058686ea")
            .conditional("identifier=http://tempuri.org|1");

        final String secondEntryConditionalTemplate = "%sidentifier=http://tempuri.org|2&based-on=urn:uuid:59cda086-4763-4ef0-8e36-8c90058686ea";
        final String secondMatchUrl = String.format(secondEntryConditionalTemplate, theHasQuestionMark ? "?" : "");

        bundleBuilder.addTransactionCreateEntry(myTask2)
            .conditional(secondMatchUrl);

        final IBaseBundle requestBundle = bundleBuilder.getBundle();
        assertTrue(requestBundle instanceof Bundle);

        final List<Bundle.BundleEntryComponent> responseEntries = sendBundleAndGetResponse(requestBundle);

        assertEquals(2, responseEntries.size());
        assertEquals(Set.of("201 Created"), responseEntries.stream().map(Bundle.BundleEntryComponent::getResponse).map(Bundle.BundleEntryResponseComponent::getStatus).collect(Collectors.toUnmodifiableSet()));

        final List<Task> allTasksPostBundle = searchAllTasks();
        assertEquals(2, allTasksPostBundle.size());
        final Task taskPostBundle1 = allTasksPostBundle.get(0);
        assertEquals(VALUE_1, taskPostBundle1.getIdentifier().get(0).getValue());
        assertEquals(SYSTEM, taskPostBundle1.getIdentifier().get(0).getSystem());
        final Task taskPostBundle2 = allTasksPostBundle.get(1);
        assertEquals(VALUE_2, taskPostBundle2.getIdentifier().get(0).getValue());
        assertEquals(SYSTEM, taskPostBundle2.getIdentifier().get(0).getSystem());

        final List<Reference> task2BasedOn = taskPostBundle2.getBasedOn();
        assertEquals(1, task2BasedOn.size());
        final Reference task2BasedOnReference = task2BasedOn.get(0);
        assertEquals(taskPostBundle1.getIdElement().toUnqualifiedVersionless().asStringValue(), task2BasedOnReference.getReference());
    }
}

private List<Bundle.BundleEntryComponent> sendBundleAndGetResponse(IBaseBundle theRequestBundle) {
    assertTrue(theRequestBundle instanceof Bundle);

    return mySystemDao.transaction(new SystemRequestDetails(), (Bundle) theRequestBundle).getEntry();
}

private List<Task> searchAllTasks() {
    return unsafeCast(getTaskDao().search(SearchParameterMap.newSynchronous(), new SystemRequestDetails()).getAllResources());
}

private IFhirResourceDao<Task> getTaskDao() {
    return unsafeCast(myDaoRegistry.getResourceDao("Task"));
}

@SuppressWarnings("unchecked")
private static <T> T unsafeCast(Object theObject) {
    return (T) theObject;
}
}
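The ConditionalCreates tests express their match conditions through BundleBuilder's conditional(...) call. In the raw R4 structure, a conditional create is simply a POST entry whose request component carries an ifNoneExist value; a minimal sketch of roughly that shape follows, built directly on the model classes (illustrative only, not asserted to be exactly what BundleBuilder emits internally).

// Editor's illustrative sketch, not part of this changeset.
Bundle bundle = new Bundle();
bundle.setType(Bundle.BundleType.TRANSACTION);
bundle.addEntry()
    .setResource(new Task().setStatus(Task.TaskStatus.DRAFT))
    .getRequest()
    .setMethod(Bundle.HTTPVerb.POST)
    .setUrl("Task")
    .setIfNoneExist("identifier=http://tempuri.org|1"); // the match URL, minus any leading "?"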
@@ -3379,7 +3379,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
myCaptureQueriesListener.logInsertQueries();
assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(7, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(6, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));

@@ -3462,7 +3462,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(6, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(5, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

}
@@ -353,7 +353,7 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
    .setSystem(codeSystem.getUrl())
    .addFilter()
    .setProperty("concept")
    .setOp(FilterOperator.ISA)
    .setOp(FilterOperator.DESCENDENTOF)
    .setValue("dogs");

myValueSetDao.create(valueSet, mySrd);

@@ -584,7 +584,7 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
logAndValidateValueSet(result);

ArrayList<String> codes = toCodesContains(result.getExpansion().getContains());
assertThat(codes, containsInAnyOrder("childAAA", "childAAB"));
assertThat(codes, containsInAnyOrder("childAA", "childAAA", "childAAB"));
}

@@ -610,6 +610,34 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
logAndValidateValueSet(result);

ArrayList<String> codes = toCodesContains(result.getExpansion().getContains());
assertEquals(3, codes.size());
assertThat(codes, containsInAnyOrder("childAA", "childAAA", "childAAB"));
}
@Test
public void testExpandWithDescendentOfInExternalValueSetReindex() {
    TermReindexingSvcImpl.setForceSaveDeferredAlwaysForUnitTest(true);

    createExternalCsAndLocalVs();

    myResourceReindexingSvc.markAllResourcesForReindexing();
    myResourceReindexingSvc.forceReindexingPass();
    myResourceReindexingSvc.forceReindexingPass();
    myTerminologyDeferredStorageSvc.saveDeferred();
    myTerminologyDeferredStorageSvc.saveDeferred();
    myTerminologyDeferredStorageSvc.saveDeferred();

    ValueSet vs = new ValueSet();
    ConceptSetComponent include = vs.getCompose().addInclude();
    include.setSystem(TermTestUtil.URL_MY_CODE_SYSTEM);
    include.addFilter().setOp(FilterOperator.DESCENDENTOF).setValue("childAA").setProperty("concept");

    ValueSet result = myValueSetDao.expand(vs, null);
    logAndValidateValueSet(result);

    ArrayList<String> codes = toCodesContains(result.getExpansion().getContains());
    assertEquals(2, codes.size());
    assertThat(codes, containsInAnyOrder("childAAA", "childAAB"));
}

@@ -795,7 +823,7 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
ValueSet vs = new ValueSet();
ConceptSetComponent include = vs.getCompose().addInclude();
include.setSystem(TermTestUtil.URL_MY_CODE_SYSTEM);
include.addFilter().setProperty("concept").setOp(FilterOperator.ISA).setValue("ParentA");
include.addFilter().setProperty("concept").setOp(FilterOperator.DESCENDENTOF).setValue("ParentA");

ValueSet result = myValueSetDao.expand(vs, null);
logAndValidateValueSet(result);

@@ -814,7 +842,7 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
vs = new ValueSet();
include = vs.getCompose().addInclude();
include.setSystem(TermTestUtil.URL_MY_CODE_SYSTEM);
include.addFilter().setProperty("concept").setOp(FilterOperator.ISA).setValue("ParentA");
include.addFilter().setProperty("concept").setOp(FilterOperator.DESCENDENTOF).setValue("ParentA");
result = myValueSetDao.expand(vs, null);
logAndValidateValueSet(result);
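The repeated ISA-to-DESCENDENTOF swaps in these terminology tests all hinge on one semantic difference: per the FHIR filter operators, is-a includes the filter's own code in the expansion while descendent-of excludes it, which is why the expected code sets gain or drop the parent code. A minimal sketch spelling that out (illustrative only; the code system URL is hypothetical):

// Editor's illustrative sketch, not part of this changeset.
// Given a hierarchy childAA -> { childAAA, childAAB }:
//   is-a "childAA"          expands to childAA, childAAA, childAAB (self included)
//   descendent-of "childAA" expands to childAAA, childAAB          (self excluded)
ValueSet vs = new ValueSet();
vs.getCompose().addInclude()
    .setSystem("http://example.com/my_code_system") // hypothetical system URL
    .addFilter()
    .setProperty("concept")
    .setOp(FilterOperator.DESCENDENTOF)
    .setValue("childAA");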
@@ -380,17 +380,11 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
observation.getEncounter().setReference(encounter.getId()); // not versioned
builder.addTransactionCreateEntry(observation);

Bundle outcome = mySystemDao.transaction(mySrd, (Bundle) builder.getBundle());
ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
assertEquals("200 OK", outcome.getEntry().get(0).getResponse().getStatus());
assertEquals("200 OK", outcome.getEntry().get(1).getResponse().getStatus());
assertEquals("201 Created", outcome.getEntry().get(2).getResponse().getStatus());
Bundle outcome = createAndValidateBundle((Bundle) builder.getBundle(),
    List.of("200 OK", "200 OK", "201 Created"), List.of("2", "1", "1"));
IdType patientId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
IdType encounterId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());
IdType observationId = new IdType(outcome.getEntry().get(2).getResponse().getLocation());
assertEquals("2", patientId.getVersionIdPart());
assertEquals("1", encounterId.getVersionIdPart());
assertEquals("1", observationId.getVersionIdPart());

// Read back and verify that reference is now versioned
observation = myObservationDao.read(observationId);

@@ -429,14 +423,10 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
builder.addTransactionCreateEntry(observation);

myCaptureQueriesListener.clear();
Bundle outcome = mySystemDao.transaction(mySrd, (Bundle) builder.getBundle());
ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
assertEquals("200 OK", outcome.getEntry().get(0).getResponse().getStatus());
assertEquals("201 Created", outcome.getEntry().get(1).getResponse().getStatus());
Bundle outcome = createAndValidateBundle((Bundle) builder.getBundle(),
    List.of("200 OK", "201 Created"), List.of("3", "1"));
IdType patientId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
IdType observationId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());
assertEquals("3", patientId.getVersionIdPart());
assertEquals("1", observationId.getVersionIdPart());

// Make sure we're not introducing any extra DB operations
assertEquals(3, myCaptureQueriesListener.logSelectQueries().size());

@@ -468,14 +458,10 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {

myCaptureQueriesListener.clear();

Bundle outcome = mySystemDao.transaction(mySrd, (Bundle) builder.getBundle());
ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
assertEquals("200 OK", outcome.getEntry().get(0).getResponse().getStatus());
assertEquals("201 Created", outcome.getEntry().get(1).getResponse().getStatus());
Bundle outcome = createAndValidateBundle((Bundle) builder.getBundle(),
    List.of("200 OK", "201 Created"), List.of("3", "1"));
IdType patientId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
IdType observationId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());
assertEquals("3", patientId.getVersionIdPart());
assertEquals("1", observationId.getVersionIdPart());

// Make sure we're not introducing any extra DB operations
assertEquals(4, myCaptureQueriesListener.logSelectQueries().size());
@ -563,20 +549,91 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
|
|||
BundleBuilder builder = new BundleBuilder(myFhirContext);
|
||||
builder.addTransactionCreateEntry(messageHeader);
|
||||
|
||||
ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(builder.getBundle()));
|
||||
Bundle outcome = mySystemDao.transaction(mySrd, (Bundle) builder.getBundle());
|
||||
ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
|
||||
assertEquals("201 Created", outcome.getEntry().get(0).getResponse().getStatus());
|
||||
|
||||
Bundle outcome = createAndValidateBundle((Bundle) builder.getBundle(),
|
||||
List.of("201 Created"), List.of("1"));
|
||||
IdType messageHeaderId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
|
||||
assertEquals("2", patient.getIdElement().getVersionIdPart());
|
||||
assertEquals("1", messageHeaderId.getVersionIdPart());
|
||||
|
||||
// read back and verify that reference is versioned
|
||||
messageHeader = myMessageHeaderDao.read(messageHeaderId);
|
||||
assertEquals(patient.getIdElement().getValue(), messageHeader.getFocus().get(0).getReference());
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("#5619 Incorrect version of auto versioned reference for conditional update with urn id placeholder")
|
||||
public void testInsertVersionedReferencesByPath_conditionalUpdateNoOpInTransaction_addsCorrectVersionToReference() {
|
||||
Supplier<Bundle> supplier = () -> {
|
||||
// create patient
|
||||
Patient patient = new Patient();
|
||||
patient.setActive(true);
|
||||
patient.addIdentifier().setSystem("http://example.com").setValue("test");
|
||||
|
||||
// add patient to the Bundle - conditional update with placeholder url
|
||||
Bundle bundle = new Bundle();
|
||||
bundle.setType(Bundle.BundleType.TRANSACTION);
|
||||
bundle.addEntry()
|
||||
.setResource(patient)
|
||||
.setFullUrl("urn:uuid:00001")
|
||||
.getRequest()
|
||||
.setMethod(Bundle.HTTPVerb.PUT)
|
||||
.setUrl("Patient?identifier=http://example.com|test");
|
||||
|
||||
// create MessageHeader
|
||||
MessageHeader messageHeader = new MessageHeader();
|
||||
messageHeader.getMeta().setExtension(messageHeaderAutoVersionExtension);
|
||||
// add reference
|
||||
messageHeader.addFocus().setReference("urn:uuid:00001");
|
||||
|
||||
bundle.addEntry()
|
||||
.setResource(messageHeader)
|
||||
.getRequest()
|
||||
.setMethod(Bundle.HTTPVerb.POST)
|
||||
.setUrl("/MessageHeader");
|
||||
|
||||
return bundle;
|
||||
};
|
||||
|
||||
// create bundle first time
|
||||
Bundle outcome = createAndValidateBundle(supplier.get(),
|
||||
List.of("201 Created", "201 Created"), List.of("1", "1"));
|
||||
IdType patientId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
|
||||
IdType messageHeaderId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());
|
||||
|
||||
// read back and verify that reference is versioned and correct
|
||||
Patient patient = myPatientDao.read(patientId);
|
||||
MessageHeader messageHeader = myMessageHeaderDao.read(messageHeaderId);
|
||||
assertEquals(patient.getIdElement().getValue(), messageHeader.getFocus().get(0).getReference());
|
||||
|
||||
// create bundle second time
|
||||
outcome = createAndValidateBundle(supplier.get(), List.of("200 OK", "201 Created"), List.of("1", "1"));
|
||||
patientId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
|
||||
messageHeaderId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());
|
||||
|
||||
// read back and verify that reference is versioned and correct
|
||||
patient = myPatientDao.read(patientId);
|
||||
messageHeader = myMessageHeaderDao.read(messageHeaderId);
|
||||
assertEquals(patient.getIdElement().getValue(), messageHeader.getFocus().get(0).getReference());
|
||||
}
|
||||
|
||||
private Bundle createAndValidateBundle(Bundle theBundle, List<String> theOutcomeStatuses,
|
||||
List<String> theOutcomeVersions) {
|
||||
assertEquals(theBundle.getEntry().size(), theOutcomeStatuses.size(),
|
||||
"Size of OutcomeStatuses list is incorrect");
|
||||
assertEquals(theBundle.getEntry().size(), theOutcomeVersions.size(),
|
||||
"Size of OutcomeVersions list is incorrect");
|
||||
|
||||
ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(theBundle));
|
||||
Bundle result = mySystemDao.transaction(mySrd, theBundle);
|
||||
ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(theBundle));
|
||||
|
||||
for (int i = 0; i < result.getEntry().size(); i++) {
|
||||
assertEquals(theOutcomeStatuses.get(i), result.getEntry().get(i).getResponse().getStatus());
|
||||
IIdType resultId = new IdType(result.getEntry().get(i).getResponse().getLocation());
|
||||
assertEquals(theOutcomeVersions.get(i), resultId.getVersionIdPart());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private Patient createAndUpdatePatient(String thePatientId) {
|
||||
Patient patient = new Patient();
|
||||
patient.setId(thePatientId);
|
||||
|
|
|

@ -1,24 +1,32 @@
package ca.uhn.fhir.jpa.interceptor;

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4SystemTest;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.extractor.ISearchParamExtractor;
import ca.uhn.fhir.jpa.util.SqlQuery;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.util.BundleBuilder;
import ca.uhn.fhir.util.MultimapCollector;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Multimap;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Encounter;
import org.hl7.fhir.r4.model.Enumerations;

@ -32,6 +40,7 @@ import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.SpyBean;

import java.io.IOException;
import java.util.List;

@ -47,21 +56,25 @@ import static org.hamcrest.Matchers.matchesPattern;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.verify;

public class PatientIdPartitionInterceptorTest extends BaseJpaR4SystemTest {

public class PatientIdPartitionInterceptorTest extends BaseResourceProviderR4Test {
public static final int ALTERNATE_DEFAULT_ID = -1;
private PatientIdPartitionInterceptor mySvc;

private ForceOffsetSearchModeInterceptor myForceOffsetSearchModeInterceptor;

@Autowired
private ISearchParamExtractor mySearchParamExtractor;

@SpyBean
@Autowired
private PatientIdPartitionInterceptor mySvc;

@Override
@BeforeEach
public void before() throws Exception {
super.before();
mySvc = new PatientIdPartitionInterceptor(myFhirContext, mySearchParamExtractor, myPartitionSettings);
myForceOffsetSearchModeInterceptor = new ForceOffsetSearchModeInterceptor();

myInterceptorRegistry.registerInterceptor(mySvc);

@ -539,4 +552,29 @@ public class PatientIdPartitionInterceptorTest extends BaseJpaR4SystemTest {
return (Patient)update.getResource();
}

@Test
public void testIdentifyForRead_serverOperation_returnsAllPartitions() {
ReadPartitionIdRequestDetails readRequestDetails = ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_EXPORT);
RequestPartitionId requestPartitionId = mySvc.identifyForRead(readRequestDetails, mySrd);
assertEquals(requestPartitionId, RequestPartitionId.allPartitions());
assertEquals(RestOperationTypeEnum.EXTENDED_OPERATION_SERVER, readRequestDetails.getRestOperationType());
}

@Test
public void testSystemBulkExport_withPatientIdPartitioningWithNoResourceType_usesNonPatientSpecificPartition() throws IOException {
final BulkExportJobParameters options = new BulkExportJobParameters();
options.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);

HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + ProviderConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);

try (CloseableHttpResponse postResponse = myServer.getHttpClient().execute(post)){
ourLog.info("Response: {}", postResponse);
assertEquals(202, postResponse.getStatusLine().getStatusCode());
assertEquals("Accepted", postResponse.getStatusLine().getReasonPhrase());
}

verify(mySvc).provideNonPatientSpecificQueryResponse(any());
}
}

@ -1,7 +1,6 @@
package ca.uhn.fhir.jpa.provider.r4;

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
import ca.uhn.fhir.jpa.delete.ThreadSafeResourceDeleterSvc;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.model.util.JpaConstants;

@ -14,6 +13,7 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.client.interceptor.SimpleRequestHeaderInterceptor;
import ca.uhn.fhir.rest.server.exceptions.AuthenticationException;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;

@ -37,11 +37,13 @@ import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Composition;
import org.hl7.fhir.r4.model.Condition;
import org.hl7.fhir.r4.model.Encounter;
import org.hl7.fhir.r4.model.ExplanationOfBenefit;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Identifier;
import org.hl7.fhir.r4.model.MessageHeader;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Observation.ObservationStatus;
import org.hl7.fhir.r4.model.Organization;

@ -49,11 +51,14 @@ import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Practitioner;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.Resource;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.ValueSet;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;

@ -68,7 +73,9 @@ import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

public class AuthorizationInterceptorJpaR4Test extends BaseResourceProviderR4Test {

@ -79,6 +86,8 @@ public class AuthorizationInterceptorJpaR4Tes
private SearchParamMatcher mySearchParamMatcher;
@Autowired
private ThreadSafeResourceDeleterSvc myThreadSafeResourceDeleterSvc;
private AuthorizationInterceptor myReadAllBundleInterceptor;
private AuthorizationInterceptor myReadAllPatientInterceptor;

@BeforeEach
@Override

@ -87,7 +96,8 @@ public class AuthorizationInterceptorJpaR4Tes
myStorageSettings.setAllowMultipleDelete(true);
myStorageSettings.setExpungeEnabled(true);
myStorageSettings.setDeleteExpungeEnabled(true);
myServer.getRestfulServer().registerInterceptor(new BulkDataExportProvider());
myReadAllBundleInterceptor = new ReadAllAuthorizationInterceptor("Bundle");
myReadAllPatientInterceptor = new ReadAllAuthorizationInterceptor("Patient");
}

@Override

@ -1506,4 +1516,250 @@ public class AuthorizationInterceptorJpaR4Tes
}

}

@Test
public void testSearchBundles_withPermissionToSearchAllBundles_doesNotReturn403ForbiddenForDocumentBundles(){
myServer.getRestfulServer().registerInterceptor(myReadAllBundleInterceptor);

Bundle bundle1 = createDocumentBundle(createPatient("John", "Smith"));
Bundle bundle2 = createDocumentBundle(createPatient("Jane", "Doe"));
assertSearchContainsResources("/Bundle", bundle1, bundle2);
}

@Test
public void testSearchBundles_withPermissionToSearchAllBundles_doesNotReturn403ForbiddenForCollectionBundles(){
myServer.getRestfulServer().registerInterceptor(myReadAllBundleInterceptor);

Bundle bundle1 = createCollectionBundle(createPatient("John", "Smith"));
Bundle bundle2 = createCollectionBundle(createPatient("Jane", "Doe"));
assertSearchContainsResources("/Bundle", bundle1, bundle2);
}

@Test
public void testSearchBundles_withPermissionToSearchAllBundles_doesNotReturn403ForbiddenForMessageBundles(){
myServer.getRestfulServer().registerInterceptor(myReadAllBundleInterceptor);

Bundle bundle1 = createMessageHeaderBundle(createPatient("John", "Smith"));
Bundle bundle2 = createMessageHeaderBundle(createPatient("Jane", "Doe"));
assertSearchContainsResources("/Bundle", bundle1, bundle2);
}

@Test
public void testSearchBundles_withPermissionToViewOneBundle_onlyAllowsViewingOneBundle(){
Bundle bundle1 = createMessageHeaderBundle(createPatient("John", "Smith"));
Bundle bundle2 = createMessageHeaderBundle(createPatient("Jane", "Doe"));

myServer.getRestfulServer().getInterceptorService().registerInterceptor(
new ReadInCompartmentAuthorizationInterceptor("Bundle", bundle1.getIdElement())
);

assertSearchContainsResources("/Bundle?_id=" + bundle1.getIdPart(), bundle1);
assertSearchFailsWith403Forbidden("/Bundle?_id=" + bundle2.getIdPart());
assertSearchFailsWith403Forbidden("/Bundle");
}

@Test
public void testSearchPatients_withPermissionToSearchAllBundles_returns403Forbidden(){
myServer.getRestfulServer().registerInterceptor(myReadAllBundleInterceptor);

createPatient("John", "Smith");
createPatient("Jane", "Doe");
assertSearchFailsWith403Forbidden("/Patient");
}

@Test
public void testSearchPatients_withPermissionToSearchAllPatients_returnsAllPatients(){
myServer.getRestfulServer().registerInterceptor(myReadAllPatientInterceptor);

Patient patient1 = createPatient("John", "Smith");
Patient patient2 = createPatient("Jane", "Doe");
assertSearchContainsResources("/Patient", patient1, patient2);
}

@Test
public void testSearchPatients_withPermissionToViewOnePatient_onlyAllowsViewingOnePatient(){
Patient patient1 = createPatient("John", "Smith");
Patient patient2 = createPatient("Jane", "Doe");

myServer.getRestfulServer().getInterceptorService().registerInterceptor(
new ReadInCompartmentAuthorizationInterceptor("Patient", patient1.getIdElement())
);

assertSearchContainsResources("/Patient?_id=" + patient1.getIdPart(), patient1);
assertSearchFailsWith403Forbidden("/Patient?_id=" + patient2.getIdPart());
assertSearchFailsWith403Forbidden("/Patient");
}

@Test
public void testToListOfResourcesAndExcludeContainer_withSearchSetContainingDocumentBundles_onlyRecursesOneLevelDeep() {
Bundle bundle1 = createDocumentBundle(createPatient("John", "Smith"));
Bundle bundle2 = createDocumentBundle(createPatient("John", "Smith"));
Bundle searchSet = createSearchSet(bundle1, bundle2);

RequestDetails requestDetails = new SystemRequestDetails();
requestDetails.setResourceName("Bundle");

List<IBaseResource> resources = AuthorizationInterceptor.toListOfResourcesAndExcludeContainer(requestDetails, searchSet, myFhirContext);
assertEquals(2, resources.size());
assertTrue(resources.contains(bundle1));
assertTrue(resources.contains(bundle2));
}

@Test
public void testToListOfResourcesAndExcludeContainer_withSearchSetContainingPatients_returnsPatients() {
Patient patient1 = createPatient("John", "Smith");
Patient patient2 = createPatient("Jane", "Doe");
Bundle searchSet = createSearchSet(patient1, patient2);

RequestDetails requestDetails = new SystemRequestDetails();
requestDetails.setResourceName("Patient");

List<IBaseResource> resources = AuthorizationInterceptor.toListOfResourcesAndExcludeContainer(requestDetails, searchSet, myFhirContext);
assertEquals(2, resources.size());
assertTrue(resources.contains(patient1));
assertTrue(resources.contains(patient2));
}

@ParameterizedTest
@EnumSource(value = Bundle.BundleType.class, names = {"DOCUMENT", "COLLECTION", "MESSAGE"})
public void testShouldExamineBundleResources_withBundleRequestAndStandAloneBundleType_returnsFalse(Bundle.BundleType theBundleType){
RequestDetails requestDetails = new SystemRequestDetails();
requestDetails.setResourceName("Bundle");
Bundle bundle = new Bundle();
bundle.setType(theBundleType);

assertFalse(AuthorizationInterceptor.shouldExamineBundleChildResources(requestDetails, myFhirContext, bundle));
}

@ParameterizedTest
@EnumSource(value = Bundle.BundleType.class, names = {"DOCUMENT", "COLLECTION", "MESSAGE"}, mode= EnumSource.Mode.EXCLUDE)
public void testShouldExamineBundleResources_withBundleRequestAndNonStandAloneBundleType_returnsTrue(Bundle.BundleType theBundleType){
RequestDetails requestDetails = new SystemRequestDetails();
requestDetails.setResourceName("Bundle");
Bundle bundle = new Bundle();
bundle.setType(theBundleType);

assertTrue(AuthorizationInterceptor.shouldExamineBundleChildResources(requestDetails, myFhirContext, bundle));
}

@ParameterizedTest
@EnumSource(value = Bundle.BundleType.class)
public void testShouldExamineBundleResources_withNonBundleRequests_returnsTrue(Bundle.BundleType theBundleType){
RequestDetails requestDetails = new SystemRequestDetails();
requestDetails.setResourceName("Patient");
Bundle bundle = new Bundle();
bundle.setType(theBundleType);

assertTrue(AuthorizationInterceptor.shouldExamineBundleChildResources(requestDetails, myFhirContext, bundle));
}

private Patient createPatient(String theFirstName, String theLastName){
Patient patient = new Patient();
patient.addName().addGiven(theFirstName).setFamily(theLastName);
return (Patient) myPatientDao.create(patient, mySrd).getResource();
}

private Bundle createDocumentBundle(Patient thePatient){
Composition composition = new Composition();
composition.setType(new CodeableConcept().addCoding(new Coding().setSystem("http://example.org").setCode("some-type")));
composition.getSubject().setReference(thePatient.getIdElement().getValue());

Bundle bundle = new Bundle();
bundle.setType(Bundle.BundleType.DOCUMENT);
bundle.addEntry().setResource(composition);
bundle.addEntry().setResource(thePatient);
return (Bundle) myBundleDao.create(bundle, mySrd).getResource();
}

private Bundle createCollectionBundle(Patient thePatient) {
Bundle bundle = new Bundle();
bundle.setType(Bundle.BundleType.COLLECTION);
bundle.addEntry().setResource(thePatient);
return (Bundle) myBundleDao.create(bundle, mySrd).getResource();
}

private Bundle createMessageHeaderBundle(Patient thePatient) {
Bundle bundle = new Bundle();
bundle.setType(Bundle.BundleType.MESSAGE);

MessageHeader messageHeader = new MessageHeader();
Coding event = new Coding().setSystem("http://acme.com").setCode("some-event");
messageHeader.setEvent(event);
messageHeader.getFocusFirstRep().setReference(thePatient.getIdElement().getValue());
bundle.addEntry().setResource(messageHeader);
bundle.addEntry().setResource(thePatient);

return (Bundle) myBundleDao.create(bundle, mySrd).getResource();
}

private void assertSearchContainsResources(String theUrl, Resource... theExpectedResources){
List<String> expectedIds = Arrays.stream(theExpectedResources)
.map(resource -> resource.getIdPart())
.toList();

Bundle searchResult = myClient
.search()
.byUrl(theUrl)
.returnBundle(Bundle.class)
.execute();

List<String> actualIds = searchResult.getEntry().stream()
.map(entry -> entry.getResource().getIdPart())
.toList();

assertEquals(expectedIds.size(), actualIds.size());
assertTrue(expectedIds.containsAll(actualIds));
}

private void assertSearchFailsWith403Forbidden(String theUrl){
try {
myClient.search().byUrl(theUrl).execute();
fail();
} catch (Exception e){
assertTrue(e.getMessage().contains("HTTP 403 Forbidden"));
}
}

private Bundle createSearchSet(Resource... theResources){
Bundle bundle = new Bundle();
bundle.setType(Bundle.BundleType.SEARCHSET);
Arrays.stream(theResources).forEach(resource -> bundle.addEntry().setResource(resource));
return bundle;
}

static class ReadAllAuthorizationInterceptor extends AuthorizationInterceptor {

private final String myResourceType;

public ReadAllAuthorizationInterceptor(String theResourceType){
super(PolicyEnum.DENY);
myResourceType = theResourceType;
}

@Override
public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
return new RuleBuilder()
.allow().read().resourcesOfType(myResourceType).withAnyId().andThen()
.build();
}
}

static class ReadInCompartmentAuthorizationInterceptor extends AuthorizationInterceptor {

private final String myResourceType;
private final IIdType myId;

public ReadInCompartmentAuthorizationInterceptor(String theResourceType, IIdType theId){
super(PolicyEnum.DENY);
myResourceType = theResourceType;
myId = theId;
}

@Override
public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
return new RuleBuilder()
.allow().read().allResources().inCompartment(myResourceType, myId).andThen()
.build();
}
}
}

@ -43,8 +43,6 @@ public class BulkExportProviderR4Test extends BaseResourceProviderR4Test {

assertThat(e.getStatusCode(), equalTo(404));
}

@Test
void testBulkExport_typePatientIdNotExists_throws404() {
// given no data

@ -202,7 +202,7 @@ public class ResourceProviderR4ValueSetNoVerCSNoVerTest extends BaseResourceProv
myLocalVs.setUrl(URL_MY_VALUE_SET);
ConceptSetComponent include = myLocalVs.getCompose().addInclude();
include.setSystem(codeSystem.getUrl());
include.addFilter().setProperty("concept").setOp(FilterOperator.ISA).setValue("ParentA");
include.addFilter().setProperty("concept").setOp(FilterOperator.DESCENDENTOF).setValue("ParentA");
myLocalValueSetId = myValueSetDao.create(myLocalVs, mySrd).getId().toUnqualifiedVersionless();
}

@ -1199,7 +1199,7 @@ public class ResourceProviderR4ValueSetNoVerCSNoVerTest extends BaseResourceProv
.setSystem(URL_MY_CODE_SYSTEM)
.addFilter()
.setProperty("concept")
.setOp(FilterOperator.ISA)
.setOp(FilterOperator.DESCENDENTOF)
.setValue("A");
myLocalVs
.getCompose()

@ -167,7 +167,7 @@ public class ResourceProviderR4ValueSetVerCSNoVerTest extends BaseResourceProvid
myLocalVs.setUrl(URL_MY_VALUE_SET);
ConceptSetComponent include = myLocalVs.getCompose().addInclude();
include.setSystem(codeSystem.getUrl());
include.addFilter().setProperty("concept").setOp(FilterOperator.ISA).setValue("ParentA");
include.addFilter().setProperty("concept").setOp(FilterOperator.DESCENDENTOF).setValue("ParentA");
myLocalValueSetId = myValueSetDao.create(myLocalVs, mySrd).getId().toUnqualifiedVersionless();
}

@ -196,7 +196,7 @@ public class ResourceProviderR4ValueSetVerCSVerTest extends BaseResourceProvider
ConceptSetComponent include = myLocalVs.getCompose().addInclude();
include.setSystem(theCodeSystemUrl);
include.setVersion(theValueSetVersion);
include.addFilter().setProperty("concept").setOp(FilterOperator.ISA).setValue("ParentA");
include.addFilter().setProperty("concept").setOp(FilterOperator.DESCENDENTOF).setValue("ParentA");
return myLocalVs;

}

@ -17,8 +17,10 @@ import ca.uhn.fhir.jpa.cache.ResourceChangeListenerCacheRefresherImpl;
import ca.uhn.fhir.jpa.cache.ResourceChangeListenerRegistryImpl;
import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
import ca.uhn.fhir.jpa.cache.ResourceVersionMap;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.JpaResourceDao;
import ca.uhn.fhir.jpa.dao.ResourceHistoryCalculator;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.index.DaoSearchParamSynchronizer;

@ -148,6 +150,7 @@ public class GiantTransactionPerfTest {
private IIdHelperService myIdHelperService;
@Mock
private IJpaStorageResourceParser myJpaStorageResourceParser;
private final ResourceHistoryCalculator myResourceHistoryCalculator = new ResourceHistoryCalculator(FhirContext.forR4Cached(), false);
private IMetaTagSorter myMetaTagSorter;

@AfterEach

@ -271,6 +274,7 @@
myEobDao.setJpaStorageResourceParserForUnitTest(myJpaStorageResourceParser);
myEobDao.setExternallyStoredResourceServiceRegistryForUnitTest(new ExternallyStoredResourceServiceRegistry());
myEobDao.setMyMetaTagSorter(myMetaTagSorter);
myEobDao.setResourceHistoryCalculator(myResourceHistoryCalculator);
myEobDao.start();

myDaoRegistry.setResourceDaos(Lists.newArrayList(myEobDao));

@ -208,7 +208,7 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test {
myLocalVs.setUrl(URL_MY_VALUE_SET);
ConceptSetComponent include = myLocalVs.getCompose().addInclude();
include.setSystem(codeSystem.getUrl());
include.addFilter().setProperty("concept").setOp(Enumerations.FilterOperator.ISA).setValue("ParentA");
include.addFilter().setProperty("concept").setOp(Enumerations.FilterOperator.DESCENDENTOF).setValue("ParentA");
myLocalValueSetId = myValueSetDao.create(myLocalVs, mySrd).getId().toUnqualifiedVersionless();
}

@ -231,7 +231,7 @@ public class ResourceProviderR5ValueSetVersionedTest extends BaseResourceProvide
ConceptSetComponent include = myLocalVs.getCompose().addInclude();
include.setSystem(theCodeSystemUrl);
include.setVersion(theValueSetVersion);
include.addFilter().setProperty("concept").setOp(FilterOperator.ISA).setValue("ParentA");
include.addFilter().setProperty("concept").setOp(FilterOperator.DESCENDENTOF).setValue("ParentA");
return myLocalVs;

}

@ -0,0 +1,135 @@
package ca.uhn.fhir.jpa.auth;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor;
import ca.uhn.fhir.rest.server.interceptor.auth.IAuthRule;
import ca.uhn.fhir.rest.server.interceptor.auth.IRuleApplier;
import ca.uhn.fhir.rest.server.interceptor.auth.PolicyEnum;
import ca.uhn.fhir.rest.server.interceptor.auth.RuleBuilder;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.StringType;
import org.junit.jupiter.api.Test;

import java.util.HashSet;
import java.util.List;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;

public class RuleImplOpTest {
private static final String OPERATION = "operation";
private static final String TYPE = "type";
private static final String PATH = "path";
private static final String VALUE = "value";
private static final String REPLACE = "replace";
private static final String PATIENT_BIRTH_DATE = "Patient.birthDate";
private static final Parameters PARAMETERS = buildParameters();
private static final String DOCUMENT = "document";
private static final String ERROR_TEMPLATE = "HAPI-0339: Can not handle transaction with nested resource of type %s";
private static final String ERROR_PARAMETERS = String.format(ERROR_TEMPLATE, "Parameters");
private static final String ERROR_BUNDLE = String.format(ERROR_TEMPLATE, "Bundle");

private static final String REQUEST_RULELIST = AuthorizationInterceptor.class.getName() + "_1_RULELIST";
private final Patient myPatient = buildPatient();

private final List<IAuthRule> myRules = new RuleBuilder()
.allow()
.transaction()
.withAnyOperation()
.andApplyNormalRules()
.andThen()
.allow()
.write()
.allResources()
.withAnyId()
.build();

private final IAuthRule myRule = myRules.get(0);
private final FhirContext myFhirContext = FhirContext.forR4Cached();
private final IBaseBundle myInnerBundle = buildInnerBundler(myFhirContext);

private final RequestDetails mySystemRequestDetails = buildSystemRequestDetails(myFhirContext, myRules);
private final IRuleApplier myRuleApplier = new AuthorizationInterceptor();

@Test
void testTransactionBundleUpdateWithParameters() {
final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext);
bundleBuilder.addTransactionUpdateEntry(PARAMETERS);

try {
applyRule(bundleBuilder.getBundle());
fail("Expected an InvalidRequestException");
} catch (InvalidRequestException exception) {
assertEquals(ERROR_PARAMETERS, exception.getMessage());
}
}

@Test
void testTransactionBundleWithNestedBundle() {
final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext);
bundleBuilder.addTransactionCreateEntry(myInnerBundle);

try {
applyRule(bundleBuilder.getBundle());
fail("Expected an InvalidRequestException");
} catch (InvalidRequestException exception) {
assertEquals(ERROR_BUNDLE, exception.getMessage());
}
}

@Test
void testTransactionBundlePatchWithParameters() {
final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext);
bundleBuilder.addTransactionFhirPatchEntry(myPatient.getIdElement(), PARAMETERS);

final AuthorizationInterceptor.Verdict verdict = applyRule(bundleBuilder.getBundle());

assertThat(verdict.getDecision(), equalTo(PolicyEnum.ALLOW));
}

private AuthorizationInterceptor.Verdict applyRule(IBaseBundle theBundle) {
return myRule.applyRule(RestOperationTypeEnum.TRANSACTION, mySystemRequestDetails, theBundle, myPatient.getIdElement(), myPatient, myRuleApplier, new HashSet<>(), null);
}

private static Parameters buildParameters() {
final Parameters patch = new Parameters();

final Parameters.ParametersParameterComponent op = patch.addParameter().setName(OPERATION);
op.addPart().setName(TYPE).setValue(new CodeType(REPLACE));
op.addPart().setName(PATH).setValue(new CodeType(PATIENT_BIRTH_DATE));
op.addPart().setName(VALUE).setValue(new StringType("1912-04-14"));

return patch;
}

private static RequestDetails buildSystemRequestDetails(FhirContext theFhirContext, List<IAuthRule> theRules) {
final SystemRequestDetails systemRequestDetails = new SystemRequestDetails();
systemRequestDetails.setFhirContext(theFhirContext);
systemRequestDetails.getUserData().put(REQUEST_RULELIST, theRules);

return systemRequestDetails;
}

private static Patient buildPatient() {
final Patient patient = new Patient();
patient.setId(new IdType("Patient", "1"));
return patient;
}

private static IBaseBundle buildInnerBundler(FhirContext theFhirContext) {
final BundleBuilder innerBundleBuilder = new BundleBuilder(theFhirContext);
innerBundleBuilder.setType(DOCUMENT);
return innerBundleBuilder.getBundle();
}
}

@ -18,7 +18,8 @@ import java.util.Set;
import java.util.function.Consumer;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.isOneOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.oneOf;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;

@ -98,10 +99,10 @@ public class PartitionRunnerTest {
getPartitionRunner(5).runInPartitionedThreads(resourceIds, partitionConsumer);
List<HookParams> calls = myLatch.awaitExpected();
PartitionCall partitionCall1 = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 0);
assertThat(partitionCall1.threadName, isOneOf(TEST_THREADNAME_1, TEST_THREADNAME_2));
assertThat(partitionCall1.threadName, is(oneOf(TEST_THREADNAME_1, TEST_THREADNAME_2)));
assertEquals(5, partitionCall1.size);
PartitionCall partitionCall2 = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 1);
assertThat(partitionCall2.threadName, isOneOf(TEST_THREADNAME_1, TEST_THREADNAME_2));
assertThat(partitionCall2.threadName, is(oneOf(TEST_THREADNAME_1, TEST_THREADNAME_2)));
assertEquals(5, partitionCall2.size);
assertNotEquals(partitionCall1.threadName, partitionCall2.threadName);
}

@ -119,14 +120,38 @@ public class PartitionRunnerTest {
getPartitionRunner(5).runInPartitionedThreads(resourceIds, partitionConsumer);
List<HookParams> calls = myLatch.awaitExpected();
PartitionCall partitionCall1 = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 0);
assertThat(partitionCall1.threadName, isOneOf(TEST_THREADNAME_1, TEST_THREADNAME_2));
assertThat(partitionCall1.threadName, is(oneOf(TEST_THREADNAME_1, TEST_THREADNAME_2)));
assertEquals(true, nums.remove(partitionCall1.size));
PartitionCall partitionCall2 = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 1);
assertThat(partitionCall2.threadName, isOneOf(TEST_THREADNAME_1, TEST_THREADNAME_2));
assertThat(partitionCall2.threadName, is(oneOf(TEST_THREADNAME_1, TEST_THREADNAME_2)));
assertEquals(true, nums.remove(partitionCall2.size));
assertNotEquals(partitionCall1.threadName, partitionCall2.threadName);
}

/**
* See #5636 $expunge operation ignoring ExpungeThreadCount setting in certain cases
*/
@Test
public void testExpunge_withTasksSizeBiggerThanExecutorQueue_usesConfiguredNumberOfThreads() throws InterruptedException {
// setup
List<IResourcePersistentId> resourceIds = buildPidList(2500);
Consumer<List<IResourcePersistentId>> partitionConsumer = buildPartitionConsumer(myLatch);
// with batch size = 2 we expect 2500/2 runnableTasks to be created
myLatch.setExpectedCount(1250);

// execute
getPartitionRunner(2, 2).runInPartitionedThreads(resourceIds, partitionConsumer);
List<HookParams> calls = myLatch.awaitExpected();

// validate - only two threads should be used for execution
for (int i = 0; i < 1250; i++) {
PartitionCall partitionCall = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, i);
assertThat(partitionCall.threadName, is(oneOf(TEST_THREADNAME_1, TEST_THREADNAME_2)));
}
}

@Test
public void tenItemsOneThread() throws InterruptedException {
List<IResourcePersistentId> resourceIds = buildPidList(10);

@ -25,12 +25,14 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.model.valueset.BundleTypeEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
import ca.uhn.fhir.rest.server.interceptor.consent.ConsentInterceptor;
import ca.uhn.fhir.util.BundleUtil;
import com.google.common.collect.Lists;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;

@ -78,8 +80,12 @@ public class AuthorizationInterceptor implements IRuleApplier {

public static final String REQUEST_ATTRIBUTE_BULK_DATA_EXPORT_OPTIONS =
AuthorizationInterceptor.class.getName() + "_BulkDataExportOptions";
public static final String BUNDLE = "Bundle";
private static final AtomicInteger ourInstanceCount = new AtomicInteger(0);
private static final Logger ourLog = LoggerFactory.getLogger(AuthorizationInterceptor.class);
private static final Set<BundleTypeEnum> STANDALONE_BUNDLE_RESOURCE_TYPES =
Set.of(BundleTypeEnum.DOCUMENT, BundleTypeEnum.COLLECTION, BundleTypeEnum.MESSAGE);

private final int myInstanceIndex = ourInstanceCount.incrementAndGet();
private final String myRequestSeenResourcesKey =
AuthorizationInterceptor.class.getName() + "_" + myInstanceIndex + "_SEENRESOURCES";

@ -525,7 +531,7 @@
case EXTENDED_OPERATION_TYPE:
case EXTENDED_OPERATION_INSTANCE: {
if (theResponseObject != null) {
resources = toListOfResourcesAndExcludeContainer(theResponseObject, fhirContext);
resources = toListOfResourcesAndExcludeContainer(theRequestDetails, theResponseObject, fhirContext);
}
break;
}

@ -572,22 +578,23 @@
OUT,
}

static List<IBaseResource> toListOfResourcesAndExcludeContainer(
IBaseResource theResponseObject, FhirContext fhirContext) {
public static List<IBaseResource> toListOfResourcesAndExcludeContainer(
RequestDetails theRequestDetails, IBaseResource theResponseObject, FhirContext fhirContext) {
if (theResponseObject == null) {
return Collections.emptyList();
}

List<IBaseResource> retVal;

boolean isContainer = false;
boolean shouldExamineChildResources = false;
if (theResponseObject instanceof IBaseBundle) {
isContainer = true;
IBaseBundle bundle = (IBaseBundle) theResponseObject;
shouldExamineChildResources = shouldExamineBundleChildResources(theRequestDetails, fhirContext, bundle);
} else if (theResponseObject instanceof IBaseParameters) {
isContainer = true;
shouldExamineChildResources = true;
}

if (!isContainer) {
if (!shouldExamineChildResources) {
return Collections.singletonList(theResponseObject);
}

@ -604,6 +611,26 @@
return retVal;
}

/**
* This method determines if the given Bundle should have permissions applied to the resources inside or
* to the Bundle itself.
*
* This distinction is important in Bundle requests where a user has permissions to view all Bundles. In
* this scenario we want to apply permissions to the Bundle itself and not the resources inside if
* the Bundle is of type document, collection, or message.
*/
public static boolean shouldExamineBundleChildResources(
RequestDetails theRequestDetails, FhirContext theFhirContext, IBaseBundle theBundle) {
boolean isBundleRequest = theRequestDetails != null && BUNDLE.equals(theRequestDetails.getResourceName());
if (!isBundleRequest) {
return true;
}
BundleTypeEnum bundleType = BundleUtil.getBundleTypeEnum(theFhirContext, theBundle);
boolean isStandaloneBundleResource =
bundleType != null && STANDALONE_BUNDLE_RESOURCE_TYPES.contains(bundleType);
return !isStandaloneBundleResource;
}

public static class Verdict {

private final IAuthRule myDecidingRule;
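
The javadoc above pins the rule down: only when the request targets the Bundle type itself are document, collection, and message bundles treated as opaque resources. A minimal standalone sketch of the same decision, using only calls that appear in this hunk plus stock context creation (illustrative class name, not shipped code):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.valueset.BundleTypeEnum;
import ca.uhn.fhir.util.BundleUtil;
import org.hl7.fhir.r4.model.Bundle;

public class StandaloneBundleCheckExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4Cached();
		Bundle bundle = new Bundle();
		bundle.setType(Bundle.BundleType.DOCUMENT);

		// document/collection/message bundles are resources in their own right,
		// so their child resources are not examined individually
		BundleTypeEnum type = BundleUtil.getBundleTypeEnum(ctx, bundle);
		boolean standalone = type == BundleTypeEnum.DOCUMENT
				|| type == BundleTypeEnum.COLLECTION
				|| type == BundleTypeEnum.MESSAGE;
		System.out.println("examine children: " + !standalone); // false for a document bundle
	}
}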

@ -63,6 +63,8 @@ import static org.hl7.fhir.instance.model.api.IAnyResource.SP_RES_ID;
@SuppressWarnings("EnumSwitchStatementWhichMissesCases")
class RuleImplOp extends BaseRule /* implements IAuthRule */ {
private static final Logger ourLog = LoggerFactory.getLogger(RuleImplOp.class);
private static final String PARAMETERS = "Parameters";
private static final String BUNDLE = "Bundle";

private AppliesTypeEnum myAppliesTo;
private Set<String> myAppliesToTypes;

@ -771,7 +773,10 @@
*/
if (nextPart.getResource() != null) {
RuntimeResourceDefinition resourceDef = ctx.getResourceDefinition(nextPart.getResource());
if ("Parameters".equals(resourceDef.getName()) || "Bundle".equals(resourceDef.getName())) {

// TODO: LD: We should pursue a more ideal fix after the release to inspect the bundle more deeply
// to ensure that it's a valid request
if (shouldRejectBundleEntry(resourceDef, operation)) {
throw new InvalidRequestException(Msg.code(339)
+ "Can not handle transaction with nested resource of type " + resourceDef.getName());
}

@ -812,7 +817,7 @@
} else if (theOutputResource != null) {

List<IBaseResource> outputResources = AuthorizationInterceptor.toListOfResourcesAndExcludeContainer(
theOutputResource, theRequestDetails.getFhirContext());
theRequestDetails, theOutputResource, theRequestDetails.getFhirContext());

Verdict verdict = null;
for (IBaseResource nextResource : outputResources) {

@ -835,6 +840,24 @@
}
}

/**
* Ascertain whether this transaction request contains nested operations or nested transactions.
* This is done carefully because a bundle can contain a nested PATCH with Parameters, which is supported,
* while a non-PATCH nested Parameters resource may be problematic.
*
* @param theResourceDef The {@link RuntimeResourceDefinition} associated with this bundle entry
* @param theOperation The {@link RestOperationTypeEnum} associated with this bundle entry
* @return true if we should reject this bundle entry
*/
private boolean shouldRejectBundleEntry(
RuntimeResourceDefinition theResourceDef, RestOperationTypeEnum theOperation) {
final boolean isResourceParameters = PARAMETERS.equals(theResourceDef.getName());
final boolean isResourceBundle = BUNDLE.equals(theResourceDef.getName());
final boolean isOperationPatch = theOperation == RestOperationTypeEnum.PATCH;

return (isResourceParameters && !isOperationPatch) || isResourceBundle;
}

private void setTargetFromResourceId(RequestDetails theRequestDetails, FhirContext ctx, RuleTarget target) {
String[] idValues = theRequestDetails.getParameters().get(SP_RES_ID);
target.resourceIds = new ArrayList<>();

@ -909,7 +932,7 @@

private boolean requestAppliesToTransaction(
FhirContext theContext, RuleOpEnum theOp, IBaseResource theInputResource) {
if (!"Bundle".equals(theContext.getResourceType(theInputResource))) {
if (!BUNDLE.equals(theContext.getResourceType(theInputResource))) {
return false;
}

@ -44,6 +44,9 @@ public class CacheProvider<K, V> implements ca.uhn.fhir.sl.cache.CacheProvider<K
public Cache<K, V> create(long timeoutMillis, long maximumSize) {
return new CacheDelegator<K, V>(Caffeine.newBuilder()
.expireAfterWrite(timeoutMillis, TimeUnit.MILLISECONDS)
// Caffeine locks the whole array when growing the hash table.
// Set initial capacity to max to avoid this. All our caches are <1M entries.
.initialCapacity((int) maximumSize)
.maximumSize(maximumSize)
.build());
}

@ -51,6 +54,9 @@
public LoadingCache<K, V> create(long timeoutMillis, long maximumSize, CacheLoader<K, V> loading) {
return new LoadingCacheDelegator<K, V>(Caffeine.newBuilder()
.expireAfterWrite(timeoutMillis, TimeUnit.MILLISECONDS)
// Caffeine locks the whole array when growing the hash table.
// Set initial capacity to max to avoid this. All our caches are <1M entries.
.initialCapacity((int) maximumSize)
.maximumSize(maximumSize)
.build(loading::load));
}
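
The comments in these two hunks carry the reasoning: Caffeine grows its hash table under a lock on the whole backing array, so pre-sizing the table to the maximum eliminates resize pauses for caches known to stay under roughly a million entries. A self-contained sketch of the same builder pattern (assuming a com.github.ben-manes.caffeine dependency; the sizes and keys are illustrative):

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;

import java.util.concurrent.TimeUnit;

public class PreSizedCacheExample {
	public static void main(String[] args) {
		long maximumSize = 100_000;
		Cache<String, String> cache = Caffeine.newBuilder()
				.expireAfterWrite(60_000, TimeUnit.MILLISECONDS)
				// Allocate the full table up front so Caffeine never needs to
				// grow (and therefore lock) the backing array at runtime.
				.initialCapacity((int) maximumSize)
				.maximumSize(maximumSize)
				.build();
		cache.put("code-system", "http://example.org/cs"); // illustrative entry
		System.out.println(cache.getIfPresent("code-system"));
	}
}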

@ -597,11 +597,11 @@ public class Builder {
"Only SELECT statements (including CTEs) are allowed here. Please check your SQL: [%s]",
theSql));
}
ourLog.info("SQL to evaluate: {}", theSql);
ourLog.debug("SQL to evaluate: {}", theSql);

myTask.addPrecondition(new ExecuteTaskPrecondition(
() -> {
ourLog.info("Checking precondition for SQL: {}", theSql);
ourLog.debug("Checking precondition for SQL: {}", theSql);
return MigrationJdbcUtils.queryForSingleBooleanResultMultipleThrowsException(
theSql, myTask.newJdbcTemplate());
},

@ -614,6 +614,11 @@
myTask.setRunDuringSchemaInitialization(true);
return this;
}

public BuilderCompleteTask setTransactional(boolean theFlag) {
myTask.setTransactional(theFlag);
return this;
}
}

public class BuilderAddTableRawSql {

@ -29,6 +29,7 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;

@ -185,9 +186,12 @@ public class BulkDataExportProvider {
theOptions.setResourceTypes(resourceTypes);
}

ReadPartitionIdRequestDetails theDetails =
ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_EXPORT);

// Determine and validate partition permissions (if needed).
RequestPartitionId partitionId =
myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, null);
myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, theDetails);
myRequestPartitionHelperService.validateHasPartitionPermissions(theRequestDetails, "Binary", partitionId);
theOptions.setPartitionId(partitionId);

@ -502,6 +506,9 @@

String serverBase = getServerBase(theRequestDetails);

// an output is required, even if empty, according to HL7 FHIR IG
bulkResponseDocument.getOutput();

for (Map.Entry<String, List<String>> entrySet :
results.getResourceTypeToBinaryIds().entrySet()) {
String resourceType = entrySet.getKey();

@ -38,6 +38,7 @@ import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

@ -56,8 +57,36 @@ public class BulkImportFileServlet extends HttpServlet {

public static final String DEFAULT_HEADER_CONTENT_TYPE = CT_FHIR_NDJSON + CHARSET_UTF8_CTSUFFIX;

private String myBasicAuth;

public BulkImportFileServlet() {}

public BulkImportFileServlet(String theBasicAuthUsername, String theBasicAuthPassword) {
setBasicAuth(theBasicAuthUsername, theBasicAuthPassword);
}

public void setBasicAuth(String username, String password) {
String auth = username + ":" + password;
String encodedAuth = Base64.getEncoder().encodeToString(auth.getBytes());
myBasicAuth = "Basic " + encodedAuth;
}

public void checkBasicAuthAndMaybeThrow403(HttpServletRequest request, HttpServletResponse response)
throws IOException {
// Check if the myBasicAuth variable is set, ignore if not.
if (myBasicAuth == null || myBasicAuth.isEmpty()) {
return;
}

String authHeader = request.getHeader("Authorization");
if (authHeader == null || !authHeader.equals(myBasicAuth)) {
response.sendError(HttpServletResponse.SC_FORBIDDEN, "Invalid authentication credentials.");
}
}

@Override
protected void doGet(HttpServletRequest theRequest, HttpServletResponse theResponse) throws IOException {
checkBasicAuthAndMaybeThrow403(theRequest, theResponse);
try {
String servletPath = theRequest.getServletPath();
String requestUri = theRequest.getRequestURI();
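
Because the servlet compares the incoming Authorization header against its pre-computed string for equality, a client must send exactly the same Basic value. A hedged client-side sketch using only the JDK (the URL and credentials are illustrative; note the servlet itself encodes with the platform default charset):

import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class BasicAuthClientExample {
	public static void main(String[] args) throws Exception {
		// Must match the username/password handed to BulkImportFileServlet
		String auth = "admin" + ":" + "password";
		String header = "Basic " + Base64.getEncoder()
				.encodeToString(auth.getBytes(StandardCharsets.UTF_8));

		URL url = new URL("http://localhost:8000/download?index=0"); // illustrative endpoint
		HttpURLConnection conn = (HttpURLConnection) url.openConnection();
		conn.setRequestProperty("Authorization", header);
		// Without a matching header the servlet answers 403 Forbidden
		System.out.println(conn.getResponseCode());
	}
}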

@ -21,6 +21,8 @@ package ca.uhn.fhir.batch2.jobs.imprt;

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.model.api.IModelJson;
import ca.uhn.fhir.model.api.annotation.SensitiveNoDisplay;
import com.fasterxml.jackson.annotation.JsonFilter;
import com.fasterxml.jackson.annotation.JsonProperty;
import jakarta.annotation.Nullable;
import jakarta.validation.constraints.Min;

@ -36,6 +38,8 @@ import java.util.List;
* This class is the parameters model object for starting a
* bulk import job.
*/
@JsonFilter(IModelJson.SENSITIVE_DATA_FILTER_NAME) // TODO GGG eventually consider pushing this up once we have more
// experience using it.
public class BulkImportJobParameters implements IModelJson {

@JsonProperty(value = "ndJsonUrls", required = true)

@ -43,8 +47,9 @@ public class BulkImportJobParameters implements IModelJson {
@NotNull(message = "At least one NDJSON URL must be provided")
private List<@Pattern(regexp = "^http[s]?://.*", message = "Must be a valid URL") String> myNdJsonUrls;

@JsonProperty(value = "httpBasicCredentials", access = JsonProperty.Access.WRITE_ONLY, required = false)
@JsonProperty(value = "httpBasicCredentials", required = false)
@Nullable
@SensitiveNoDisplay
private String myHttpBasicCredentials;

@JsonProperty(value = "maxBatchResourceCount", required = false)
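
The @JsonFilter annotation only names a filter; nothing is withheld unless a serializer registers a property filter under that name, which is why plain Jackson serialization of this class still round-trips the credentials while display-oriented serialization can suppress them. A minimal self-contained sketch of that mechanism (the annotation, filter name, and field names below are illustrative stand-ins, not HAPI's):

import com.fasterxml.jackson.annotation.JsonFilter;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.PropertyWriter;
import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

public class SensitiveFilterExample {

	// Illustrative stand-in for a field-level "do not serialize" marker
	@Retention(RetentionPolicy.RUNTIME)
	@Target(ElementType.FIELD)
	@interface Sensitive {}

	@JsonFilter("sensitiveDataFilter")
	static class Params {
		public String ndJsonUrl = "http://example.com/file.ndjson";
		@Sensitive
		public String httpBasicCredentials = "admin:password";
	}

	public static void main(String[] args) throws Exception {
		SimpleBeanPropertyFilter filter = new SimpleBeanPropertyFilter() {
			@Override
			public void serializeAsField(Object pojo, JsonGenerator gen,
					SerializerProvider provider, PropertyWriter writer) throws Exception {
				// Write the property only when the marker annotation is absent
				if (writer.getAnnotation(Sensitive.class) == null) {
					writer.serializeAsField(pojo, gen, provider);
				}
			}
		};
		ObjectMapper mapper = new ObjectMapper();
		mapper.setFilterProvider(new SimpleFilterProvider().addFilter("sensitiveDataFilter", filter));
		// Prints {"ndJsonUrl":"http://example.com/file.ndjson"} -- credentials withheld
		System.out.println(mapper.writeValueAsString(new Params()));
	}
}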

@ -154,7 +154,7 @@ public class BulkDataImportProviderTest {

JobInstanceStartRequest startRequest = myStartRequestCaptor.getValue();
ourLog.info("Parameters: {}", startRequest.getParameters());
assertTrue(startRequest.getParameters().startsWith("{\"ndJsonUrls\":[\"http://example.com/Patient\",\"http://example.com/Observation\"],\"maxBatchResourceCount\":500"));
assertTrue(startRequest.getParameters().startsWith("{\"ndJsonUrls\":[\"http://example.com/Patient\",\"http://example.com/Observation\"],\"httpBasicCredentials\":\"admin:password\",\"maxBatchResourceCount\":500,\"partitionId\":{\"allPartitions\":false"));
}

@Test
@@ -0,0 +1,24 @@
package ca.uhn.fhir.batch2.jobs.imprt;

import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.util.JsonUtil;
import org.junit.jupiter.api.Test;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;

public class ParameterSerializationTest {

	@Test
	public void testBatchJobParametersSuccessfullySerializeAllFields() {
		JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
		BulkImportJobParameters parameters = new BulkImportJobParameters();
		parameters.addNdJsonUrl("myurl");
		parameters.setHttpBasicCredentials("username:password");
		startRequest.setParameters(parameters);

		BulkImportJobParameters readBackParameters = startRequest.getParameters(BulkImportJobParameters.class);

		assertThat(readBackParameters.getHttpBasicCredentials(), is(equalTo("username:password")));
	}
}
@@ -75,9 +75,10 @@ public class JobStepExecutor<PT extends IModelJson, IT extends IModelJson, OT ex
			return;
		}

		if (stepExecutorOutput.getDataSink().firstStepProducedNothing()) {
		if (stepExecutorOutput.getDataSink().firstStepProducedNothing() && !myDefinition.isLastStepReduction()) {
			ourLog.info(
					"First step of job myInstance {} produced no work chunks, marking as completed and setting end date",
					"First step of job myInstance {} produced no work chunks and last step is not a reduction, "
							+ "marking as completed and setting end date",
					myInstanceId);
			myJobPersistence.updateInstance(myInstance.getInstanceId(), instance -> {
				instance.setEndTime(new Date());
@@ -145,6 +145,11 @@ public class JobDefinition<PT extends IModelJson> {
		return myGatedExecution;
	}

	public boolean isLastStepReduction() {
		int stepCount = getSteps().size();
		return stepCount >= 1 && getSteps().get(stepCount - 1).isReductionStep();
	}

	public int getStepIndex(String theStepId) {
		int retVal = myStepIds.indexOf(theStepId);
		Validate.isTrue(retVal != -1);
@@ -304,9 +309,9 @@ public class JobDefinition<PT extends IModelJson> {
			throw new ConfigurationException(Msg.code(2106)
					+ String.format("Job Definition %s has a reducer step but is not gated", myJobDefinitionId));
		}
		mySteps.add(new JobDefinitionReductionStep<PT, NIT, OT>(
		mySteps.add(new JobDefinitionReductionStep<>(
				theStepId, theStepDescription, theStepWorker, myNextInputType, theOutputType));
		return new Builder<PT, OT>(
		return new Builder<>(
				mySteps,
				myJobDefinitionId,
				myJobDefinitionVersion,
@@ -83,7 +83,7 @@ public class JobInstanceStartRequest implements IModelJson {
	}

	public JobInstanceStartRequest setParameters(IModelJson theParameters) {
		myParameters = JsonUtil.serializeOrInvalidRequest(theParameters);
		myParameters = JsonUtil.serializeWithSensitiveData(theParameters);
		return this;
	}
@@ -192,12 +192,12 @@ public class InstanceProgress {
	/**
	 * Transitions from IN_PROGRESS/ERRORED based on chunk statuses.
	 */
	public void calculateNewStatus() {
	public void calculateNewStatus(boolean theLastStepIsReduction) {
		if (myFailedChunkCount > 0) {
			myNewStatus = StatusEnum.FAILED;
		} else if (myErroredChunkCount > 0) {
			myNewStatus = StatusEnum.ERRORED;
		} else if (myIncompleteChunkCount == 0 && myCompleteChunkCount > 0) {
		} else if (myIncompleteChunkCount == 0 && myCompleteChunkCount > 0 && !theLastStepIsReduction) {
			myNewStatus = StatusEnum.COMPLETED;
		}
	}
@@ -22,19 +22,26 @@ package ca.uhn.fhir.batch2.progress;
import ca.uhn.fhir.batch2.api.IJobPersistence;
import ca.uhn.fhir.batch2.coordinator.JobDefinitionRegistry;
import ca.uhn.fhir.batch2.maintenance.JobChunkProgressAccumulator;
import ca.uhn.fhir.batch2.model.JobDefinition;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.WorkChunk;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.model.api.IModelJson;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.Logs;
import ca.uhn.fhir.util.StopWatch;
import jakarta.annotation.Nonnull;
import org.slf4j.Logger;

import java.util.Iterator;
import java.util.Optional;

public class JobInstanceProgressCalculator {
	private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
	private final IJobPersistence myJobPersistence;
	private final JobChunkProgressAccumulator myProgressAccumulator;
	private final JobInstanceStatusUpdater myJobInstanceStatusUpdater;
	private final JobDefinitionRegistry myJobDefinitionRegistry;

	public JobInstanceProgressCalculator(
			IJobPersistence theJobPersistence,

@@ -42,6 +49,7 @@ public class JobInstanceProgressCalculator {
			JobDefinitionRegistry theJobDefinitionRegistry) {
		myJobPersistence = theJobPersistence;
		myProgressAccumulator = theProgressAccumulator;
		myJobDefinitionRegistry = theJobDefinitionRegistry;
		myJobInstanceStatusUpdater = new JobInstanceStatusUpdater(theJobDefinitionRegistry);
	}

@@ -96,8 +104,20 @@ public class JobInstanceProgressCalculator {
		}

		// wipmb separate status update from stats collection in 6.8
		instanceProgress.calculateNewStatus();
		instanceProgress.calculateNewStatus(lastStepIsReduction(instanceId));

		return instanceProgress;
	}

	private boolean lastStepIsReduction(String theInstanceId) {
		JobInstance jobInstance = getJobInstance(theInstanceId);
		JobDefinition<IModelJson> jobDefinition = myJobDefinitionRegistry.getJobDefinitionOrThrowException(jobInstance);
		return jobDefinition.isLastStepReduction();
	}

	private JobInstance getJobInstance(String theInstanceId) {
		Optional<JobInstance> oInstance = myJobPersistence.fetchInstance(theInstanceId);
		return oInstance.orElseThrow(() ->
				new InternalErrorException(Msg.code(2486) + "Failed to fetch JobInstance with id: " + theInstanceId));
	}
}
@@ -22,6 +22,7 @@ import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import java.util.Collections;
import java.util.Optional;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

@@ -30,6 +31,7 @@ import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

@ExtendWith(MockitoExtension.class)
@@ -90,12 +92,16 @@ public class ReductionStepDataSinkTest {
		String data = "data";
		StepOutputData stepData = new StepOutputData(data);
		WorkChunkData<StepOutputData> chunkData = new WorkChunkData<>(stepData);
		@SuppressWarnings("unchecked")
		JobDefinition<IModelJson> jobDefinition = mock(JobDefinition.class);

		// when
		JobInstance instance = JobInstance.fromInstanceId(INSTANCE_ID);
		instance.setStatus(StatusEnum.FINALIZE);
		stubUpdateInstanceCallback(instance);
		when(myJobPersistence.fetchAllWorkChunksIterator(any(), anyBoolean())).thenReturn(Collections.emptyIterator());
		when(myJobPersistence.fetchInstance(INSTANCE_ID)).thenReturn(Optional.of(instance));
		when(myJobDefinitionRegistry.getJobDefinitionOrThrowException(instance)).thenReturn(jobDefinition);

		// test
		myDataSink.accept(chunkData);

@@ -111,6 +117,8 @@ public class ReductionStepDataSinkTest {
		String data2 = "data2";
		WorkChunkData<StepOutputData> firstData = new WorkChunkData<>(new StepOutputData(data));
		WorkChunkData<StepOutputData> secondData = new WorkChunkData<>(new StepOutputData(data2));
		@SuppressWarnings("unchecked")
		JobDefinition<IModelJson> jobDefinition = mock(JobDefinition.class);

		ourLogger.setLevel(Level.ERROR);

@@ -118,6 +126,8 @@ public class ReductionStepDataSinkTest {
		instance.setStatus(StatusEnum.FINALIZE);
		when(myJobPersistence.fetchAllWorkChunksIterator(any(), anyBoolean())).thenReturn(Collections.emptyIterator());
		stubUpdateInstanceCallback(instance);
		when(myJobPersistence.fetchInstance(INSTANCE_ID)).thenReturn(Optional.of(instance));
		when(myJobDefinitionRegistry.getJobDefinitionOrThrowException(instance)).thenReturn(jobDefinition);

		// test
		myDataSink.accept(firstData);

@@ -136,10 +146,15 @@ public class ReductionStepDataSinkTest {
	@Test
	public void accept_noInstanceIdFound_throwsJobExecutionFailed() {
		// setup
		JobInstance jobInstance = mock(JobInstance.class);
		@SuppressWarnings("unchecked")
		JobDefinition<IModelJson> jobDefinition = (JobDefinition<IModelJson>) mock(JobDefinition.class);
		String data = "data";
		WorkChunkData<StepOutputData> chunkData = new WorkChunkData<>(new StepOutputData(data));
		when(myJobPersistence.updateInstance(any(), any())).thenReturn(false);
		when(myJobPersistence.fetchAllWorkChunksIterator(any(), anyBoolean())).thenReturn(Collections.emptyIterator());
		when(myJobPersistence.fetchInstance(INSTANCE_ID)).thenReturn(Optional.of(jobInstance));
		when(myJobDefinitionRegistry.getJobDefinitionOrThrowException(jobInstance)).thenReturn(jobDefinition);

		// test
		try {

@@ -151,5 +166,4 @@ public class ReductionStepDataSinkTest {
			fail("Unexpected exception", anyOtherEx);
		}
	}
}
@@ -110,4 +110,21 @@ public class BaseTransactionProcessorTest {
		assertTrue(matchResult, "Failed to find a Regex match using Url '" + matchUrl + "'");
	}

	@Test
	void identifierSubstitutionNoQuestionMark() {
		final IdSubstitutionMap idSubstitutions = new IdSubstitutionMap();
		idSubstitutions.put(new IdType("Task/urn:uuid:59cda086-4763-4ef0-8e36-8c90058686ea"), new IdType("Task/1/history/1"));
		idSubstitutions.put(new IdType("urn:uuid:59cda086-4763-4ef0-8e36-8c90058686ea"), new IdType("Task/1/_history/1"));
		final String outcome = BaseTransactionProcessor.performIdSubstitutionsInMatchUrl(idSubstitutions, "identifier=http://tempuri.org|2&based-on=urn:uuid:59cda086-4763-4ef0-8e36-8c90058686ea");
		assertEquals("identifier=http://tempuri.org|2&based-on=Task/1", outcome);
	}

	@Test
	void identifierSubstitutionYesQuestionMark() {
		final IdSubstitutionMap idSubstitutions = new IdSubstitutionMap();
		idSubstitutions.put(new IdType("Task/urn:uuid:59cda086-4763-4ef0-8e36-8c90058686ea"), new IdType("Task/1/history/1"));
		idSubstitutions.put(new IdType("urn:uuid:59cda086-4763-4ef0-8e36-8c90058686ea"), new IdType("Task/1/_history/1"));
		final String outcome = BaseTransactionProcessor.performIdSubstitutionsInMatchUrl(idSubstitutions, "?identifier=http://tempuri.org|2&based-on=urn:uuid:59cda086-4763-4ef0-8e36-8c90058686ea");
		assertEquals("?identifier=http://tempuri.org|2&based-on=Task/1", outcome);
	}
}
@@ -1806,10 +1806,7 @@ public abstract class BaseTransactionProcessor {

		theDaoMethodOutcome.setId(newId);

		IIdType target = theIdSubstitutions.getForSource(newId);
		if (target != null) {
			target.setValue(newId.getValue());
		}
		theIdSubstitutions.updateTargets(newId);

		if (theDaoMethodOutcome.getOperationOutcome() != null) {
			IBase responseEntry = entriesToProcess.getResponseBundleEntryWithVersionlessComparison(newId);
@@ -2256,8 +2253,7 @@ public abstract class BaseTransactionProcessor {
	public static String performIdSubstitutionsInMatchUrl(IdSubstitutionMap theIdSubstitutions, String theMatchUrl) {
		String matchUrl = theMatchUrl;
		if (isNotBlank(matchUrl) && !theIdSubstitutions.isEmpty()) {

			int startIdx = matchUrl.indexOf('?');
			int startIdx = 0;
			while (startIdx != -1) {

				int endIdx = matchUrl.indexOf('&', startIdx + 1);
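The old implementation only scanned from the first '?', so match URLs without a leading question mark were never substituted. A usage sketch, with values taken from the identifierSubstitutionNoQuestionMark test above:

IdSubstitutionMap idSubstitutions = new IdSubstitutionMap();
idSubstitutions.put(
		new IdType("urn:uuid:59cda086-4763-4ef0-8e36-8c90058686ea"),
		new IdType("Task/1/_history/1"));

// With startIdx now starting at 0, the placeholder is replaced even though
// the match URL has no leading '?'.
String outcome = BaseTransactionProcessor.performIdSubstitutionsInMatchUrl(
		idSubstitutions,
		"identifier=http://tempuri.org|2&based-on=urn:uuid:59cda086-4763-4ef0-8e36-8c90058686ea");
// outcome: "identifier=http://tempuri.org|2&based-on=Task/1"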
@@ -27,6 +27,7 @@ import org.hl7.fhir.instance.model.api.IIdType;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

public class IdSubstitutionMap {
@@ -87,6 +88,22 @@ public class IdSubstitutionMap {
		return myMap.isEmpty();
	}

	/**
	 * Updates all targets of the map with a new id value if the input id has
	 * the same ResourceType and IdPart as the target id.
	 */
	public void updateTargets(IIdType theNewId) {
		if (theNewId == null) {
			return;
		}
		String newUnqualifiedVersionLessId = theNewId.toUnqualifiedVersionless().getValue();
		entrySet().stream()
				.map(Pair::getValue)
				.filter(targetId ->
						Objects.equals(targetId.toUnqualifiedVersionless().getValue(), newUnqualifiedVersionLessId))
				.forEach(targetId -> targetId.setValue(theNewId.getValue()));
	}

	private static class Entry {

		private final String myUnversionedId;
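A short usage sketch, with values borrowed from the new IdSubstitutionMapTest further down: only targets whose unqualified versionless id matches the incoming id are updated.

IdSubstitutionMap map = new IdSubstitutionMap();
map.put(new IdType("urn:uuid:1234"), new IdType("Patient/123/_history/1"));
map.put(new IdType("urn:uuid:5000"), new IdType("Patient/5000"));

map.updateTargets(new IdType("Patient/123/_history/3"));

map.getForSource("urn:uuid:1234").getValue(); // "Patient/123/_history/3"
map.getForSource("urn:uuid:5000").getValue(); // "Patient/5000" (unchanged)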
@@ -184,9 +184,13 @@ public class PartitionRunner {
			}
			ourLog.info("Slot become available after {}ms", sw.getMillis());
		};

		// setting corePoolSize and maximumPoolSize to be the same as threadCount
		// to ensure that the number of allocated threads for the expunge operation does not exceed the configured limit
		// see ThreadPoolExecutor documentation for details
		return new ThreadPoolExecutor(
				threadCount,
				MAX_POOL_SIZE,
				threadCount,
				0L,
				TimeUnit.MILLISECONDS,
				executorQueue,
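An illustrative sketch of the fix, not taken from the class (the queue choice here is an assumption): with corePoolSize equal to maximumPoolSize, the executor is fixed-size, so surplus tasks queue up instead of allocating threads beyond the configured limit.

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

class FixedPoolSketch {
	static ThreadPoolExecutor newFixedPool(int threadCount) {
		// corePoolSize == maximumPoolSize: the pool never grows past
		// threadCount; additional work waits in the queue.
		return new ThreadPoolExecutor(
				threadCount,
				threadCount,
				0L,
				TimeUnit.MILLISECONDS,
				new LinkedBlockingQueue<>());
	}
}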
@@ -33,21 +33,21 @@ import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.extractor.ISearchParamExtractor;
import ca.uhn.fhir.jpa.util.ResourceCompartmentUtil;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import jakarta.annotation.Nonnull;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.IdType;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isEmpty;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

/**
@@ -117,14 +117,15 @@ public class PatientIdPartitionInterceptor {
	@Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_READ)
	public RequestPartitionId identifyForRead(
			ReadPartitionIdRequestDetails theReadDetails, RequestDetails theRequestDetails) {
		if (isBlank(theReadDetails.getResourceType())) {
			return provideNonCompartmentMemberTypeResponse(null);
		}
		RuntimeResourceDefinition resourceDef = myFhirContext.getResourceDefinition(theReadDetails.getResourceType());
		List<RuntimeSearchParam> compartmentSps =
				ResourceCompartmentUtil.getPatientCompartmentSearchParams(resourceDef);
		if (compartmentSps.isEmpty()) {
			return provideNonCompartmentMemberTypeResponse(null);

		List<RuntimeSearchParam> compartmentSps = Collections.emptyList();
		if (!isEmpty(theReadDetails.getResourceType())) {
			RuntimeResourceDefinition resourceDef =
					myFhirContext.getResourceDefinition(theReadDetails.getResourceType());
			compartmentSps = ResourceCompartmentUtil.getPatientCompartmentSearchParams(resourceDef);
			if (compartmentSps.isEmpty()) {
				return provideNonCompartmentMemberTypeResponse(null);
			}
		}

		//noinspection EnumSwitchStatementWhichMissesCases

@@ -158,11 +159,20 @@ public class PatientIdPartitionInterceptor {
				}

				break;

			case EXTENDED_OPERATION_SERVER:
				String extendedOp = theReadDetails.getExtendedOperationName();
				if (ProviderConstants.OPERATION_EXPORT.equals(extendedOp)) {
					return provideNonPatientSpecificQueryResponse(theReadDetails);
				}
				break;
			default:
				// nothing
		}

		if (isEmpty(theReadDetails.getResourceType())) {
			return provideNonCompartmentMemberTypeResponse(null);
		}

		// If we couldn't identify a patient ID by the URL, let's try using the
		// conditional target if we have one
		if (theReadDetails.getConditionalTargetOrNull() != null) {

@@ -172,15 +182,6 @@ public class PatientIdPartitionInterceptor {
		return provideNonPatientSpecificQueryResponse(theReadDetails);
	}

	@Nonnull
	private List<RuntimeSearchParam> getCompartmentSearchParams(RuntimeResourceDefinition resourceDef) {
		return resourceDef.getSearchParams().stream()
				.filter(param -> param.getParamType() == RestSearchParameterTypeEnum.REFERENCE)
				.filter(param -> param.getProvidesMembershipInCompartments() != null
						&& param.getProvidesMembershipInCompartments().contains("Patient"))
				.collect(Collectors.toList());
	}

	private List<String> getResourceIdList(
			SearchParameterMap theParams, String theParamName, String theResourceType, boolean theExpectOnlyOneBool) {
		List<String> idParts = new ArrayList<>();
@@ -0,0 +1,68 @@
package ca.uhn.fhir.jpa.dao;

import org.hl7.fhir.r4.model.IdType;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.junit.jupiter.params.provider.ValueSource;
import org.mockito.junit.jupiter.MockitoExtension;

import static org.junit.jupiter.api.Assertions.assertEquals;

@ExtendWith(MockitoExtension.class)
public class IdSubstitutionMapTest {

	private IdSubstitutionMap idSubstitutions;

	@BeforeEach
	void setUp() {
		idSubstitutions = new IdSubstitutionMap();
	}

	@ParameterizedTest
	@CsvSource({
		"Patient/123/_history/3, Patient/123/_history/2",
		"Patient/123/_history/3, Patient/123"
	})
	void testUpdateTargets_inputMatchesTarget_onlyMatchedTargetUpdated(String theInputId, String theTargetId) {
		idSubstitutions.put(new IdType("urn:uuid:1234"), new IdType(theTargetId));
		idSubstitutions.put(new IdType("urn:uuid:5000"), new IdType("Patient/5000"));
		idSubstitutions.put(new IdType("urn:uuid:6000"), new IdType("Patient/6000_history/3"));

		idSubstitutions.updateTargets(new IdType(theInputId));

		assertEquals(theInputId, idSubstitutions.getForSource("urn:uuid:1234").getValue());
		assertEquals("Patient/5000", idSubstitutions.getForSource("urn:uuid:5000").getValue());
		assertEquals("Patient/6000_history/3", idSubstitutions.getForSource("urn:uuid:6000").getValue());
	}

	@Test
	void testUpdateTargets_inputMatchesAllTargets_allTargetsUpdated() {
		idSubstitutions.put(new IdType("urn:uuid:1234"), new IdType("Patient/123/_history/1"));
		idSubstitutions.put(new IdType("urn:uuid:5000"), new IdType("Patient/123/_history/2"));
		idSubstitutions.put(new IdType("urn:uuid:6000"), new IdType("Patient/123/_history/4"));

		idSubstitutions.updateTargets(new IdType("Patient/123/_history/3"));

		assertEquals("Patient/123/_history/3", idSubstitutions.getForSource("urn:uuid:1234").getValue());
		assertEquals("Patient/123/_history/3", idSubstitutions.getForSource("urn:uuid:5000").getValue());
		assertEquals("Patient/123/_history/3", idSubstitutions.getForSource("urn:uuid:6000").getValue());
	}

	@ParameterizedTest
	@ValueSource(strings = {"Patient/124", "Patient/124/_history/3", "Patient", ""})
	void testUpdateTargets_noMatchingTarget_noUpdate(String theInputId) {
		idSubstitutions.put(new IdType("urn:uuid:1234"), new IdType("Patient/123/_history/3"));
		idSubstitutions.updateTargets(new IdType(theInputId));
		assertEquals("Patient/123/_history/3", idSubstitutions.getForSource("urn:uuid:1234").getValue());
	}

	@Test
	void testUpdateTargets_nullInputId_noExceptionAndNoUpdate() {
		idSubstitutions.put(new IdType("urn:uuid:1234"), new IdType("Patient/123/_history/3"));
		idSubstitutions.updateTargets(null);
		assertEquals("Patient/123/_history/3", idSubstitutions.getForSource("urn:uuid:1234").getValue());
	}
}
@@ -3,10 +3,12 @@ package ca.uhn.fhir.util.bundle;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
import ca.uhn.fhir.model.valueset.BundleTypeEnum;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.BundleBuilder;
import ca.uhn.fhir.util.BundleUtil;
import ca.uhn.fhir.util.TestUtil;
import jakarta.annotation.Nonnull;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;

@@ -25,8 +27,9 @@ import org.hl7.fhir.r4.model.UriType;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;

import jakarta.annotation.Nonnull;
import java.util.Collections;
import java.util.List;
import java.util.function.Consumer;
@@ -555,6 +558,37 @@ public class BundleUtilTest {
		assertNull(actual);
	}

	@ParameterizedTest
	@CsvSource({
		// Actual BundleType Expected BundleTypeEnum
		"TRANSACTION, TRANSACTION",
		"DOCUMENT, DOCUMENT",
		"MESSAGE, MESSAGE",
		"BATCHRESPONSE, BATCH_RESPONSE",
		"TRANSACTIONRESPONSE, TRANSACTION_RESPONSE",
		"HISTORY, HISTORY",
		"SEARCHSET, SEARCHSET",
		"COLLECTION, COLLECTION"
	})
	public void testGetBundleTypeEnum_withKnownBundleTypes_returnsCorrectBundleTypeEnum(Bundle.BundleType theBundleType, BundleTypeEnum theExpectedBundleTypeEnum) {
		Bundle bundle = new Bundle();
		bundle.setType(theBundleType);
		assertEquals(theExpectedBundleTypeEnum, BundleUtil.getBundleTypeEnum(ourCtx, bundle));
	}

	@Test
	public void testGetBundleTypeEnum_withNullBundleType_returnsNull() {
		Bundle bundle = new Bundle();
		bundle.setType(Bundle.BundleType.NULL);
		assertNull(BundleUtil.getBundleTypeEnum(ourCtx, bundle));
	}

	@Test
	public void testGetBundleTypeEnum_withNoBundleType_returnsNull() {
		Bundle bundle = new Bundle();
		assertNull(BundleUtil.getBundleTypeEnum(ourCtx, bundle));
	}

	@Nonnull
	private static Bundle withBundle(Resource theResource) {
		final Bundle bundle = new Bundle();
@@ -28,6 +28,7 @@ import org.hl7.fhir.convertors.factory.VersionConvertorFactory_43_50;
import org.hl7.fhir.dstu2.model.ValueSet;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.CodeSystem.CodeSystemContentMode;
import org.hl7.fhir.r5.model.Resource;
import org.hl7.fhir.r5.model.ValueSet.ConceptReferenceComponent;
import org.slf4j.Logger;
@@ -408,21 +409,27 @@ public class CommonCodeSystemsTerminologyService implements IValidationSupport {

	@Override
	public IBaseResource fetchCodeSystem(String theSystem) {

		final CodeSystemContentMode content;
		Map<String, String> map;
		switch (defaultString(theSystem)) {
			case COUNTRIES_CODESYSTEM_URL:
				map = ISO_3166_CODES;
				content = CodeSystemContentMode.COMPLETE;
				break;
			case CURRENCIES_CODESYSTEM_URL:
				map = ISO_4217_CODES;
				content = CodeSystemContentMode.COMPLETE;
				break;
			case MIMETYPES_CODESYSTEM_URL:
				map = Collections.emptyMap();
				content = CodeSystemContentMode.NOTPRESENT;
				break;
			default:
				return null;
		}

		CodeSystem retVal = new CodeSystem();
		retVal.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);
		retVal.setContent(content);
		retVal.setUrl(theSystem);
		for (Map.Entry<String, String> nextEntry : map.entrySet()) {
			retVal.addConcept().setCode(nextEntry.getKey()).setDisplay(nextEntry.getValue());
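A usage sketch matching the updated test expectations further down (the FhirContext setup is an assumption): the mimetypes system now comes back as a stub CodeSystem with no concepts and content mode NOTPRESENT, instead of an empty COMPLETE system.

FhirContext ctx = FhirContext.forR4(); // assumed setup
CommonCodeSystemsTerminologyService svc = new CommonCodeSystemsTerminologyService(ctx);

CodeSystem cs = (CodeSystem) svc.fetchCodeSystem(
		CommonCodeSystemsTerminologyService.MIMETYPES_CODESYSTEM_URL);
// cs.getConcept().isEmpty() == true
// cs.getContent() == CodeSystem.CodeSystemContentMode.NOTPRESENT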
@@ -745,13 +745,6 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu
		return expandValueSetR5(theValidationSupportContext, input, theWantSystemUrlAndVersion, theWantCode);
	}

	@Nullable
	private org.hl7.fhir.r5.model.ValueSet expandValueSetR5(
			ValidationSupportContext theValidationSupportContext, org.hl7.fhir.r5.model.ValueSet theInput)
			throws ExpansionCouldNotBeCompletedInternallyException {
		return expandValueSetR5(theValidationSupportContext, theInput, null, null);
	}

	@Nullable
	private org.hl7.fhir.r5.model.ValueSet expandValueSetR5(
			ValidationSupportContext theValidationSupportContext,
@@ -909,20 +902,25 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu

		includeOrExcludeSystemResource = codeSystemLoader.apply(loadedCodeSystemUrl);

		Set<String> wantCodes;
		if (theInclude.getConcept().isEmpty()) {
			wantCodes = null;
		boolean isIncludeWithDeclaredConcepts = !theInclude.getConcept().isEmpty();

		final Set<String> wantCodes;
		if (isIncludeWithDeclaredConcepts) {
			wantCodes = theInclude.getConcept().stream()
					.map(org.hl7.fhir.r5.model.ValueSet.ConceptReferenceComponent::getCode)
					.collect(Collectors.toSet());
		} else {
			wantCodes =
					theInclude.getConcept().stream().map(t -> t.getCode()).collect(Collectors.toSet());
			wantCodes = null;
		}

		boolean ableToHandleCode = false;
		String failureMessage = null;
		FailureType failureType = FailureType.OTHER;

		if (includeOrExcludeSystemResource == null
				|| includeOrExcludeSystemResource.getContent() == Enumerations.CodeSystemContentMode.NOTPRESENT) {
		boolean isIncludeCodeSystemIgnored = includeOrExcludeSystemResource != null
				&& includeOrExcludeSystemResource.getContent() == Enumerations.CodeSystemContentMode.NOTPRESENT;

		if (includeOrExcludeSystemResource == null || isIncludeCodeSystemIgnored) {

			if (theWantCode != null) {
				if (theValidationSupportContext
@@ -971,7 +969,7 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu
					// If the ValueSet.compose.include has no individual concepts in it, and
					// we can't find the actual referenced CodeSystem, we have no choice
					// but to fail
					if (!theInclude.getConcept().isEmpty()) {
					if (isIncludeWithDeclaredConcepts) {
						ableToHandleCode = true;
					} else {
						failureMessage = getFailureMessageForMissingOrUnusableCodeSystem(
@@ -998,15 +996,22 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu
				}
			}
		} else {
			if (isNotBlank(theInclude.getSystem())
					&& !theInclude.getConcept().isEmpty()
					&& theInclude.getFilter().isEmpty()
					&& theInclude.getValueSet().isEmpty()) {
				theInclude.getConcept().stream()
						.map(t -> new FhirVersionIndependentConcept(
								theInclude.getSystem(), t.getCode(), t.getDisplay(), theInclude.getVersion()))
						.forEach(t -> nextCodeList.add(t));
				ableToHandleCode = true;
			boolean isIncludeFromSystem = isNotBlank(theInclude.getSystem())
					&& theInclude.getValueSet().isEmpty();
			boolean isIncludeWithFilter = !theInclude.getFilter().isEmpty();
			if (isIncludeFromSystem && !isIncludeWithFilter) {
				if (isIncludeWithDeclaredConcepts) {
					theInclude.getConcept().stream()
							.map(t -> new FhirVersionIndependentConcept(
									theInclude.getSystem(),
									t.getCode(),
									t.getDisplay(),
									theInclude.getVersion()))
							.forEach(nextCodeList::add);
					ableToHandleCode = true;
				} else if (isIncludeCodeSystemIgnored) {
					ableToHandleCode = true;
				}
			}

		if (!ableToHandleCode) {
@@ -189,6 +189,7 @@ public class CommonCodeSystemsTerminologyServiceTest extends BaseValidationTestW
		org.hl7.fhir.r5.model.CodeSystem cs = (org.hl7.fhir.r5.model.CodeSystem) svc.fetchCodeSystem(CommonCodeSystemsTerminologyService.COUNTRIES_CODESYSTEM_URL);
		assertNotNull(cs);
		assertEquals(498, cs.getConcept().size());
		assertEquals(Enumerations.CodeSystemContentMode.COMPLETE, cs.getContent());
	}

	@Test
@@ -301,6 +302,8 @@ public class CommonCodeSystemsTerminologyServiceTest extends BaseValidationTestW
	public void testFetchCodeSystem_withMimeType_returnsOk() {
		CodeSystem cs = (CodeSystem) mySvc.fetchCodeSystem(MIMETYPES_CODESYSTEM_URL);
		assertNull(cs);
		assertTrue(cs.getConcept().isEmpty());
		assertEquals(CodeSystemContentMode.NOTPRESENT, cs.getContent());
	}

	@ParameterizedTest
@@ -17,8 +17,6 @@ import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;

@@ -33,10 +31,8 @@ import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class InMemoryTerminologyServerValidationSupportTest extends BaseValidationTestWithInlineMocks {

	private static final Logger ourLog = LoggerFactory.getLogger(InMemoryTerminologyServerValidationSupportTest.class);
	private InMemoryTerminologyServerValidationSupport mySvc;
	private FhirContext myCtx = FhirContext.forR4();
	private final FhirContext myCtx = FhirContext.forR4();
	private DefaultProfileValidationSupport myDefaultSupport;
	private ValidationSupportChain myChain;
	private PrePopulatedValidationSupport myPrePopulated;
@@ -54,8 +50,153 @@ public class InMemoryTerminologyServerValidationSupportTest extends BaseValidati
		myDefaultSupport.fetchCodeSystem("http://foo");
	}

	@ParameterizedTest
	@ValueSource(strings = {
		CommonCodeSystemsTerminologyService.MIMETYPES_VALUESET_URL,
		CommonCodeSystemsTerminologyService.CURRENCIES_VALUESET_URL,
		CommonCodeSystemsTerminologyService.LANGUAGES_VALUESET_URL
	})
	public void testExpandValueSet_commonVS_expandOk(String theValueSet) {
		ValueSet vs = (ValueSet) myChain.fetchValueSet(theValueSet);
		assertNotNull(vs);

		ValidationSupportContext valCtx = new ValidationSupportContext(myChain);

		IValidationSupport.ValueSetExpansionOutcome expansion = mySvc.expandValueSet(valCtx, new ValueSetExpansionOptions(), vs);
		assertNotNull(expansion);
		assertNull(expansion.getError());
		ValueSet valueSet = (ValueSet) expansion.getValueSet();
		assertNotNull(valueSet);
		assertNotNull(valueSet.getExpansion());
	}

	@ParameterizedTest
	@ValueSource(strings = {
		CommonCodeSystemsTerminologyService.MIMETYPES_CODESYSTEM_URL,
		CommonCodeSystemsTerminologyService.COUNTRIES_CODESYSTEM_URL,
		CommonCodeSystemsTerminologyService.CURRENCIES_CODESYSTEM_URL
	})
	public void testExpandValueSet_customVSBasedOnCommonCS_expandOk(String theCodeSystem) {
		ValueSet vs = new ValueSet();
		vs.setId("mimetype");
		vs.setUrl("http://example.com/mimetype");
		vs.setVersion("1.0");
		vs.setStatus(Enumerations.PublicationStatus.ACTIVE);
		ValueSet.ConceptSetComponent vsInclude = vs.getCompose().addInclude();
		vsInclude.setSystem(theCodeSystem);
		myPrePopulated.addValueSet(vs);

		vs = (ValueSet) myChain.fetchValueSet(vs.getUrl());
		assertNotNull(vs);

		ValidationSupportContext valCtx = new ValidationSupportContext(myChain);

		IValidationSupport.ValueSetExpansionOutcome expansion = mySvc.expandValueSet(valCtx, new ValueSetExpansionOptions(), vs);
		assertNotNull(expansion);
		assertNull(expansion.getError());
		ValueSet valueSet = (ValueSet) expansion.getValueSet();
		assertNotNull(valueSet);
		assertNotNull(valueSet.getExpansion());
	}

	@Test
	public void testValidateCodeWithInferredSystem_CommonCodeSystemsCs_BuiltInVs() {
	public void testValidateCode_mimetypeVSRandomCode_returnsOk() {
		final String codeSystem = CommonCodeSystemsTerminologyService.MIMETYPES_CODESYSTEM_URL;
		final String valueSetUrl = CommonCodeSystemsTerminologyService.MIMETYPES_VALUESET_URL;

		final String code = "someRandomCode";

		ValidationSupportContext valCtx = new ValidationSupportContext(myChain);
		ConceptValidationOptions options = new ConceptValidationOptions();

		IValidationSupport.CodeValidationResult outcome = mySvc.validateCode(valCtx, options, codeSystem, code, null, valueSetUrl);
		assertNotNull(outcome);
		assertTrue(outcome.isOk());
		assertEquals(code, outcome.getCode());
	}

	@Test
	public void testValidateCode_customMimetypeVSRandomCode_returnsOk() {
		final String codeSystem = CommonCodeSystemsTerminologyService.MIMETYPES_CODESYSTEM_URL;
		final String code = "someRandomCode";

		ValueSet vs = new ValueSet();
		vs.setId("mimetype");
		vs.setUrl("http://example.com/mimetype");
		vs.setVersion("1.0");
		vs.setStatus(Enumerations.PublicationStatus.ACTIVE);
		ValueSet.ConceptSetComponent vsInclude = vs.getCompose().addInclude();
		vsInclude.setSystem(codeSystem);
		myPrePopulated.addValueSet(vs);

		ValidationSupportContext valCtx = new ValidationSupportContext(myChain);
		ConceptValidationOptions options = new ConceptValidationOptions();

		IValidationSupport.CodeValidationResult outcome = mySvc.validateCode(valCtx, options, codeSystem, code, null, vs.getUrl());
		assertNotNull(outcome);
		assertTrue(outcome.isOk());
	}

	@Test
	public void testValidateCode_customMimetypeVSCodeInVS_returnsOk() {
		String codeSystem = CommonCodeSystemsTerminologyService.MIMETYPES_CODESYSTEM_URL;

		final String code = "someRandomCode";
		final String display = "Display " + code;

		ValueSet vs = new ValueSet();
		vs.setId("example-vs");
		vs.setUrl("http://example.com/example-vs");
		vs.setVersion("1.0");
		vs.setStatus(Enumerations.PublicationStatus.ACTIVE);
		ValueSet.ConceptSetComponent vsInclude = vs.getCompose().addInclude();
		vsInclude.setSystem(codeSystem);
		vsInclude.addConcept().setCode(code).setDisplay(display);
		myPrePopulated.addValueSet(vs);

		ValidationSupportContext valCtx = new ValidationSupportContext(myChain);
		ConceptValidationOptions options = new ConceptValidationOptions();

		IValidationSupport.CodeValidationResult outcome = mySvc.validateCode(valCtx, options, codeSystem, code, null, vs.getUrl());
		assertNotNull(outcome);
		assertTrue(outcome.isOk());
		assertEquals(code, outcome.getCode());
	}

	@ParameterizedTest
	@ValueSource(strings = {
		CommonCodeSystemsTerminologyService.MIMETYPES_CODESYSTEM_URL,
		CommonCodeSystemsTerminologyService.COUNTRIES_CODESYSTEM_URL,
		CommonCodeSystemsTerminologyService.CURRENCIES_VALUESET_URL,
		CommonCodeSystemsTerminologyService.LANGUAGES_CODESYSTEM_URL,
		CommonCodeSystemsTerminologyService.UCUM_CODESYSTEM_URL
	})
	public void testValidateCode_customMimetypeVSCodeNotInVS_returnsError(String theCodeSystem) {
		final String code = "someRandomCode";
		final String codeToValidate = "otherCode";

		ValueSet vs = new ValueSet();
		vs.setId("mimetype");
		vs.setUrl("http://example.com/mimetype");
		vs.setVersion("1.0");
		vs.setStatus(Enumerations.PublicationStatus.ACTIVE);
		ValueSet.ConceptSetComponent vsInclude = vs.getCompose().addInclude();
		vsInclude.setSystem(theCodeSystem);
		vsInclude.addConcept().setCode(code).setDisplay("Display " + code);
		myPrePopulated.addValueSet(vs);

		ValidationSupportContext valCtx = new ValidationSupportContext(myChain);
		ConceptValidationOptions options = new ConceptValidationOptions();

		IValidationSupport.CodeValidationResult outcome = mySvc.validateCode(valCtx, options, theCodeSystem, codeToValidate, null, vs.getUrl());
		assertNotNull(outcome);
		assertFalse(outcome.isOk());
		assertEquals("Unknown code '" + theCodeSystem + "#" + codeToValidate + "' for in-memory expansion of ValueSet '" + vs.getUrl() + "'", outcome.getMessage());
	}

	@Test
	public void testValidateCodeWithInferredSystem_CommonCs_BuiltInVs() {

		ValidationSupportContext valCtx = new ValidationSupportContext(myChain);
		ConceptValidationOptions options = new ConceptValidationOptions().setInferSystem(true);
@@ -279,9 +420,6 @@ public class InMemoryTerminologyServerValidationSupportTest extends BaseValidati
		assertEquals(null, outcome.getCodeSystemVersion());
	}

	@Test
	public void testExpandValueSet_VsUsesVersionedSystem_CsIsFragmentWithoutCode() {
		CodeSystem cs = new CodeSystem();