Merge remote-tracking branch 'origin/rel_7_0' into do-20240202-core-bump-6-2-16
commit 8ddec23ba5
@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>7.1.0-SNAPSHOT</version>
+    <version>7.0.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.0-SNAPSHOT</version>
+    <version>7.0.0-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.0-SNAPSHOT</version>
+    <version>7.0.0-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -91,7 +91,7 @@ public enum BundleTypeEnum {
 	/**
 	 * Returns the enumerated value associated with this code
 	 */
-	public BundleTypeEnum forCode(String theCode) {
+	public static BundleTypeEnum forCode(String theCode) {
 		BundleTypeEnum retVal = CODE_TO_ENUM.get(theCode);
 		return retVal;
 	}
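With forCode now static, a code can be resolved without an enum instance in hand. A minimal sketch (illustrative, not part of the commit):

    BundleTypeEnum type = BundleTypeEnum.forCode("transaction"); // TRANSACTION
    BundleTypeEnum unknown = BundleTypeEnum.forCode("bogus");    // null for unmapped codes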
@@ -24,6 +24,7 @@ import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
 import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.FhirVersionEnum;
+import ca.uhn.fhir.model.primitive.CodeDt;
 import org.apache.commons.lang3.Validate;
 import org.hl7.fhir.instance.model.api.IBase;
 import org.hl7.fhir.instance.model.api.ICompositeType;

@@ -41,8 +42,8 @@ public class AttachmentUtil {
 		return getOrCreateChild(theContext, theAttachment, "data", "base64Binary");
 	}
 
-	public static IPrimitiveType<String> getOrCreateContentType(FhirContext theContext, ICompositeType theAttachment) {
-		return getOrCreateChild(theContext, theAttachment, "contentType", "string");
+	public static IPrimitiveType<CodeDt> getOrCreateContentType(FhirContext theContext, ICompositeType theAttachment) {
+		return getOrCreateChild(theContext, theAttachment, "contentType", "code");
 	}
 
 	public static IPrimitiveType<String> getOrCreateUrl(FhirContext theContext, ICompositeType theAttachment) {
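A short usage sketch of the corrected helper (assumes a stock R4 setup; values are illustrative): calling it on an attachment with no contentType now materializes an empty code element instead of throwing.

    FhirContext ctx = FhirContext.forR4();
    Attachment attachment = new Attachment(); // no contentType set
    IPrimitiveType<?> contentType = AttachmentUtil.getOrCreateContentType(ctx, attachment);
    contentType.setValueAsString("application/pdf");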
@@ -251,10 +251,22 @@ public class BundleBuilder {
 	 * @param theResource The resource to create
 	 */
 	public CreateBuilder addTransactionCreateEntry(IBaseResource theResource) {
+		return addTransactionCreateEntry(theResource, null);
+	}
+
+	/**
+	 * Adds an entry containing a create (POST) request.
+	 * Also sets the Bundle.type value to "transaction" if it is not already set.
+	 *
+	 * @param theResource The resource to create
+	 * @param theFullUrl The fullUrl to attach to the entry. If null, will default to the resource ID.
+	 */
+	public CreateBuilder addTransactionCreateEntry(IBaseResource theResource, @Nullable String theFullUrl) {
 		setBundleField("type", "transaction");
 
-		IBase request =
-				addEntryAndReturnRequest(theResource, theResource.getIdElement().getValue());
+		IBase request = addEntryAndReturnRequest(
+				theResource,
+				theFullUrl != null ? theFullUrl : theResource.getIdElement().getValue());
 
 		String resourceType = myContext.getResourceType(theResource);
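Usage sketch of the new overload (illustrative identifiers, not from the commit):

    BundleBuilder builder = new BundleBuilder(myFhirContext);
    Patient patient = new Patient();
    patient.setId("Patient/123");
    // the supplied fullUrl wins; passing null falls back to the resource ID
    builder.addTransactionCreateEntry(patient, "urn:uuid:00000000-0000-0000-0000-000000000000");
    IBaseBundle bundle = builder.getBundle();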
@@ -26,6 +26,7 @@ import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeResourceDefinition;
 import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.model.primitive.IdDt;
+import ca.uhn.fhir.model.valueset.BundleTypeEnum;
 import ca.uhn.fhir.rest.api.PatchTypeEnum;
 import ca.uhn.fhir.rest.api.RequestTypeEnum;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;

@@ -235,6 +236,14 @@ public class BundleUtil {
 		return null;
 	}
 
+	public static BundleTypeEnum getBundleTypeEnum(FhirContext theContext, IBaseBundle theBundle) {
+		String bundleTypeCode = BundleUtil.getBundleType(theContext, theBundle);
+		if (isBlank(bundleTypeCode)) {
+			return null;
+		}
+		return BundleTypeEnum.forCode(bundleTypeCode);
+	}
+
 	public static void setBundleType(FhirContext theContext, IBaseBundle theBundle, String theType) {
 		RuntimeResourceDefinition def = theContext.getResourceDefinition(theBundle);
 		BaseRuntimeChildDefinition entryChild = def.getChildByName("type");
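Sketch of the new helper in use (assumes a FhirContext and an IBaseBundle in scope); it returns null both when no type code is set and when the code is unrecognized:

    BundleTypeEnum bundleType = BundleUtil.getBundleTypeEnum(myFhirContext, myBundle);
    if (bundleType == BundleTypeEnum.TRANSACTION) {
        // process as a FHIR transaction
    }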
@@ -135,6 +135,7 @@ public enum VersionEnum {
 	V6_11_0,
 
+	V7_0_0,
 
 	V7_1_0,
 	V7_2_0;
@@ -4,7 +4,7 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-fhir-bom</artifactId>
-  <version>7.1.0-SNAPSHOT</version>
+  <version>7.0.0-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>HAPI FHIR BOM</name>

@@ -12,7 +12,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.0-SNAPSHOT</version>
+    <version>7.0.0-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>7.1.0-SNAPSHOT</version>
+    <version>7.0.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.0-SNAPSHOT</version>
+    <version>7.0.0-SNAPSHOT</version>
     <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -6,7 +6,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir-cli</artifactId>
-    <version>7.1.0-SNAPSHOT</version>
+    <version>7.0.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>7.1.0-SNAPSHOT</version>
+    <version>7.0.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.0-SNAPSHOT</version>
+    <version>7.0.0-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.0-SNAPSHOT</version>
+    <version>7.0.0-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.0-SNAPSHOT</version>
+    <version>7.0.0-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>7.1.0-SNAPSHOT</version>
+    <version>7.0.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.0-SNAPSHOT</version>
+    <version>7.0.0-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5537
+title: "Calling the method getOrCreateContentType in AttachmentUtil on an attachment with no content type would throw an exception because contentType is a code, not a string.
+  This fixes the function to create an empty code as expected."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5547
+title: "The addition of the indexes `idx_sp_uri_hash_identity_pattern_ops` and `idx_sp_string_hash_nrm_pattern_ops` could occasionally time out during migration in PostgreSQL on large databases, leaving the migration table in a failed state and Smile CDR unable to boot.
+  Now the existence of the index is checked before attempting to add it again."

@@ -0,0 +1,7 @@
+---
+type: fix
+issue: 5619
+jira: SMILE-7909
+title: "Previously, when a transaction was posted with a resource that had placeholder references and auto versioning
+  references enabled for that path, if the target resource was included in the Bundle but not modified, the reference was
+  saved with a version number that didn't exist. This has been fixed."

@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 5621
+title: "Fixed a deadlock in resource conditional create."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5623
+title: "Previously, searches that used more than one chained `Bundle` `SearchParameter` (i.e. `Composition`) were only
+  adding one condition to the underlying SQL query, which resulted in incorrect search results. This has been fixed."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5626
+title: "Previously, an exception could be thrown by the container when executing a contextClosedEvent on the
+  Scheduler Service. This issue has been fixed."

@@ -0,0 +1,6 @@
+---
+type: fix
+issue: 5632
+title: "Previously, the bulk export operation was returning an empty response when no resources matched the request, which
+  didn't comply with the [HL7 HAPI IG](https://hl7.org/fhir/uv/bulkdata/export/index.html#response---complete-status).
+  This has been corrected."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5633
+title: "Smile failed to save resources running on Oracle when installed from 2023-02 or earlier.
+  This has been fixed."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5634
+title: "Previously, expanding a 'ValueSet' with no concepts based on system `urn:ietf:bcp:13` would fail with
+  `ExpansionCouldNotBeCompletedInternallyException`. This has been fixed."

@@ -0,0 +1,7 @@
+---
+type: fix
+issue: 5636
+jira: SMILE-7648
+title: "Previously, the number of threads allocated to the $expunge operation could in certain cases be more
+  than configured; this would cause hundreds of threads to be created and all available database connections
+  to be consumed. This has been fixed."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5640
+jira: SMILE-7977
+title: "Clinical reasoning version bump to address a reported 'null pointer' error that is encountered when running $evaluate-measure against a measure with an omitted measure.group.population.id."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5642
+title: "A non-superuser with correct permissions encountered HAPI-0339 when POSTing a transaction Bundle with a PATCH.
+  This has been fixed."

@@ -0,0 +1,8 @@
+---
+type: fix
+issue: 5644
+title: "Previously, searching for `Bundle` resources with permission to read all `Bundle` resources returned an
+  HTTP 403 Forbidden error. This was because the `AuthorizationInterceptor` applied permissions to the resources inside
+  the `Bundle`, instead of the `Bundle` itself. This has been fixed, and permissions are no longer applied to the resources
+  inside a `Bundle` of type `document`, `message`, or `collection` for `Bundle` requests."
+

@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 5649
+title: "Change the database upgrade script to avoid holding locks while adding indices."

@@ -0,0 +1,6 @@
+---
+type: fix
+issue: 5651
+jira: SMILE-7855
+title: "Previously, conditional creates would fail with HAPI-0929 errors if there was no preceding '?'.
+  This has been fixed."
@@ -11,7 +11,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.0-SNAPSHOT</version>
+    <version>7.0.0-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.0-SNAPSHOT</version>
+    <version>7.0.0-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.0-SNAPSHOT</version>
+    <version>7.0.0-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -28,13 +28,13 @@ import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
 import ca.uhn.fhir.util.StopWatch;
 import com.google.common.annotations.VisibleForTesting;
 import jakarta.annotation.PostConstruct;
-import jakarta.annotation.PreDestroy;
 import org.quartz.JobKey;
 import org.quartz.SchedulerException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.ApplicationContext;
+import org.springframework.context.event.ContextClosedEvent;
 import org.springframework.context.event.ContextRefreshedEvent;
 import org.springframework.context.event.EventListener;
 import org.springframework.core.env.Environment;

@@ -177,7 +177,7 @@ public abstract class BaseSchedulerServiceImpl implements ISchedulerService {
 		values.forEach(t -> t.scheduleJobs(this));
 	}
 
-	@PreDestroy
+	@EventListener(ContextClosedEvent.class)
 	public void stop() {
 		ourLog.info("Shutting down task scheduler...");
@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.1.0-SNAPSHOT</version>
+    <version>7.0.0-SNAPSHOT</version>
     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -75,4 +75,8 @@ public class HibernatePropertiesProvider {
 	public DataSource getDataSource() {
 		return myEntityManagerFactory.getDataSource();
 	}
+
+	public boolean isOracleDialect() {
+		return getDialect() instanceof org.hibernate.dialect.OracleDialect;
+	}
 }
@@ -51,6 +51,7 @@ import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
 import ca.uhn.fhir.jpa.dao.ISearchBuilder;
 import ca.uhn.fhir.jpa.dao.JpaStorageResourceParser;
 import ca.uhn.fhir.jpa.dao.MatchResourceUrlService;
+import ca.uhn.fhir.jpa.dao.ResourceHistoryCalculator;
 import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
 import ca.uhn.fhir.jpa.dao.TransactionProcessor;
 import ca.uhn.fhir.jpa.dao.data.IResourceModifiedDao;

@@ -869,4 +870,10 @@ public class JpaConfig {
 	public IMetaTagSorter metaTagSorter() {
 		return new MetaTagSorterAlphabetical();
 	}
+
+	@Bean
+	public ResourceHistoryCalculator resourceHistoryCalculator(
+			FhirContext theFhirContext, HibernatePropertiesProvider theHibernatePropertiesProvider) {
+		return new ResourceHistoryCalculator(theFhirContext, theHibernatePropertiesProvider.isOracleDialect());
+	}
 }
@@ -85,7 +85,6 @@ import ca.uhn.fhir.model.api.TagList;
 import ca.uhn.fhir.model.base.composite.BaseCodingDt;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.parser.DataFormatException;
-import ca.uhn.fhir.parser.IParser;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
 import ca.uhn.fhir.rest.api.RestOperationTypeEnum;

@@ -105,8 +104,6 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Charsets;
 import com.google.common.collect.Sets;
 import com.google.common.hash.HashCode;
-import com.google.common.hash.HashFunction;
-import com.google.common.hash.Hashing;
 import jakarta.annotation.Nonnull;
 import jakarta.annotation.Nullable;
 import jakarta.annotation.PostConstruct;

@@ -264,6 +261,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 	@Autowired
 	private PlatformTransactionManager myTransactionManager;
 
+	@Autowired
+	protected ResourceHistoryCalculator myResourceHistoryCalculator;
+
 	protected final CodingSpy myCodingSpy = new CodingSpy();
 
 	@VisibleForTesting

@@ -277,6 +277,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		mySearchParamPresenceSvc = theSearchParamPresenceSvc;
 	}
 
+	@VisibleForTesting
+	public void setResourceHistoryCalculator(ResourceHistoryCalculator theResourceHistoryCalculator) {
+		myResourceHistoryCalculator = theResourceHistoryCalculator;
+	}
+
 	@Override
 	protected IInterceptorBroadcaster getInterceptorBroadcaster() {
 		return myInterceptorBroadcaster;

@@ -643,6 +648,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 			theEntity.setResourceType(toResourceName(theResource));
 		}
 
+		byte[] resourceBinary;
 		String resourceText;
 		ResourceEncodingEnum encoding;
 		boolean changed = false;

@@ -659,6 +665,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 			if (address != null) {
 
 				encoding = ResourceEncodingEnum.ESR;
+				resourceBinary = null;
 				resourceText = address.getProviderId() + ":" + address.getLocation();
 				changed = true;
@@ -675,10 +682,15 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 
 			theEntity.setFhirVersion(myContext.getVersion().getVersion());
 
-			HashFunction sha256 = Hashing.sha256();
-			resourceText = encodeResource(theResource, encoding, excludeElements, myContext);
-			encoding = ResourceEncodingEnum.JSON;
-			HashCode hashCode = sha256.hashUnencodedChars(resourceText);
+			// TODO: LD: Once 2024-02 is out the door we should consider further refactoring here to move
+			// more of this logic within the calculator and eliminate more local variables
+			final ResourceHistoryState calculate = myResourceHistoryCalculator.calculateResourceHistoryState(
+					theResource, encoding, excludeElements);
+
+			resourceText = calculate.getResourceText();
+			resourceBinary = calculate.getResourceBinary();
+			encoding = calculate.getEncoding(); // This may be a no-op
+			final HashCode hashCode = calculate.getHashCode();
 
 			String hashSha256 = hashCode.toString();
 			if (!hashSha256.equals(theEntity.getHashSha256())) {

@@ -696,6 +708,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		} else {
 
 			encoding = null;
+			resourceBinary = null;
 			resourceText = null;
 		}

@@ -713,6 +726,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 				changed = true;
 			}
 
+			resourceBinary = null;
 			resourceText = null;
 			encoding = ResourceEncodingEnum.DEL;
 		}

@@ -737,13 +751,17 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 				if (currentHistoryVersion == null || !currentHistoryVersion.hasResource()) {
 					changed = true;
 				} else {
-					changed = !StringUtils.equals(currentHistoryVersion.getResourceTextVc(), resourceText);
+					// TODO: LD: Once 2024-02 is out the door we should consider further refactoring here to move
+					// more of this logic within the calculator and eliminate more local variables
+					changed = myResourceHistoryCalculator.isResourceHistoryChanged(
+							currentHistoryVersion, resourceBinary, resourceText);
 				}
 			}
 		}
 
 		EncodedResource retVal = new EncodedResource();
 		retVal.setEncoding(encoding);
+		retVal.setResourceBinary(resourceBinary);
 		retVal.setResourceText(resourceText);
 		retVal.setChanged(changed);
 
@@ -1393,8 +1411,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		ResourceEncodingEnum encoding = myStorageSettings.getResourceEncoding();
 		List<String> excludeElements = new ArrayList<>(8);
 		getExcludedElements(historyEntity.getResourceType(), excludeElements, theResource.getMeta());
-		String encodedResourceString = encodeResource(theResource, encoding, excludeElements, myContext);
-		boolean changed = !StringUtils.equals(historyEntity.getResourceTextVc(), encodedResourceString);
+		String encodedResourceString =
+				myResourceHistoryCalculator.encodeResource(theResource, encoding, excludeElements);
+		byte[] resourceBinary = ResourceHistoryCalculator.getResourceBinary(encoding, encodedResourceString);
+		final boolean changed = myResourceHistoryCalculator.isResourceHistoryChanged(
+				historyEntity, resourceBinary, encodedResourceString);
 
 		historyEntity.setUpdated(theTransactionDetails.getTransactionDate());
 

@@ -1406,14 +1427,15 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 			return historyEntity;
 		}
 
-		populateEncodedResource(encodedResource, encodedResourceString, ResourceEncodingEnum.JSON);
+		myResourceHistoryCalculator.populateEncodedResource(
+				encodedResource, encodedResourceString, resourceBinary, encoding);
 	}
 
 	/*
 	 * Save the resource itself to the resourceHistoryTable
 	 */
 	historyEntity = myEntityManager.merge(historyEntity);
 	historyEntity.setEncoding(encodedResource.getEncoding());
 	historyEntity.setResource(encodedResource.getResourceBinary());
 	historyEntity.setResourceTextVc(encodedResource.getResourceText());
 	myResourceHistoryTableDao.save(historyEntity);
 

@@ -1423,8 +1445,12 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 	}
 
 	private void populateEncodedResource(
-			EncodedResource encodedResource, String encodedResourceString, ResourceEncodingEnum theEncoding) {
+			EncodedResource encodedResource,
+			String encodedResourceString,
+			byte[] theResourceBinary,
+			ResourceEncodingEnum theEncoding) {
 		encodedResource.setResourceText(encodedResourceString);
+		encodedResource.setResourceBinary(theResourceBinary);
 		encodedResource.setEncoding(theEncoding);
 	}
 

@@ -1489,6 +1515,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		}
 
 		historyEntry.setEncoding(theChanged.getEncoding());
+		historyEntry.setResource(theChanged.getResourceBinary());
 		historyEntry.setResourceTextVc(theChanged.getResourceText());
 
 		ourLog.debug("Saving history entry ID[{}] for RES_ID[{}]", historyEntry.getId(), historyEntry.getResourceId());

@@ -1926,16 +1953,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		return resourceText;
 	}
 
-	public static String encodeResource(
-			IBaseResource theResource,
-			ResourceEncodingEnum theEncoding,
-			List<String> theExcludeElements,
-			FhirContext theContext) {
-		IParser parser = theEncoding.newParser(theContext);
-		parser.setDontEncodeElements(theExcludeElements);
-		return parser.encodeResourceToString(theResource);
-	}
-
 	private static String parseNarrativeTextIntoWords(IBaseResource theResource) {
 
 		StringBuilder b = new StringBuilder();
@@ -1709,17 +1709,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		if (historyEntity.getEncoding() == ResourceEncodingEnum.JSONC
 				|| historyEntity.getEncoding() == ResourceEncodingEnum.JSON) {
 			byte[] resourceBytes = historyEntity.getResource();
 
 			// Always migrate data out of the bytes column
 			if (resourceBytes != null) {
 				String resourceText = decodeResource(resourceBytes, historyEntity.getEncoding());
-				ourLog.debug(
-						"Storing text of resource {} version {} as inline VARCHAR",
-						entity.getResourceId(),
-						historyEntity.getVersion());
-				historyEntity.setResourceTextVc(resourceText);
-				historyEntity.setEncoding(ResourceEncodingEnum.JSON);
-				changed = true;
+				if (myResourceHistoryCalculator.conditionallyAlterHistoryEntity(entity, historyEntity, resourceText)) {
+					changed = true;
+				}
 			}
 		}
 		if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) {
@@ -24,6 +24,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
 class EncodedResource {
 
 	private boolean myChanged;
+	private byte[] myResource;
 	private ResourceEncodingEnum myEncoding;
 	private String myResourceText;
 

@@ -35,6 +36,14 @@ class EncodedResource {
 		myEncoding = theEncoding;
 	}
 
+	public byte[] getResourceBinary() {
+		return myResource;
+	}
+
+	public void setResourceBinary(byte[] theResource) {
+		myResource = theResource;
+	}
+
 	public boolean isChanged() {
 		return myChanged;
 	}
@@ -0,0 +1,153 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.dao;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
+import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
+import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.parser.IParser;
+import com.google.common.hash.HashCode;
+import com.google.common.hash.HashFunction;
+import com.google.common.hash.Hashing;
+import jakarta.annotation.Nonnull;
+import jakarta.annotation.Nullable;
+import org.apache.commons.lang3.StringUtils;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Responsible for various resource history-centric and {@link FhirContext} aware operations called by
+ * {@link BaseHapiFhirDao} or {@link BaseHapiFhirResourceDao} that require knowledge of whether an Oracle database is
+ * being used.
+ */
+public class ResourceHistoryCalculator {
+	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceHistoryCalculator.class);
+	private static final HashFunction SHA_256 = Hashing.sha256();
+
+	private final FhirContext myFhirContext;
+	private final boolean myIsOracleDialect;
+
+	public ResourceHistoryCalculator(FhirContext theFhirContext, boolean theIsOracleDialect) {
+		myFhirContext = theFhirContext;
+		myIsOracleDialect = theIsOracleDialect;
+	}
+
+	ResourceHistoryState calculateResourceHistoryState(
+			IBaseResource theResource, ResourceEncodingEnum theEncoding, List<String> theExcludeElements) {
+		final String encodedResource = encodeResource(theResource, theEncoding, theExcludeElements);
+		final byte[] resourceBinary;
+		final String resourceText;
+		final ResourceEncodingEnum encoding;
+		final HashCode hashCode;
+
+		if (myIsOracleDialect) {
+			resourceText = null;
+			resourceBinary = getResourceBinary(theEncoding, encodedResource);
+			encoding = theEncoding;
+			hashCode = SHA_256.hashBytes(resourceBinary);
+		} else {
+			resourceText = encodedResource;
+			resourceBinary = null;
+			encoding = ResourceEncodingEnum.JSON;
+			hashCode = SHA_256.hashUnencodedChars(encodedResource);
+		}
+
+		return new ResourceHistoryState(resourceText, resourceBinary, encoding, hashCode);
+	}
+
+	boolean conditionallyAlterHistoryEntity(
+			ResourceTable theEntity, ResourceHistoryTable theHistoryEntity, String theResourceText) {
+		if (!myIsOracleDialect) {
+			ourLog.debug(
+					"Storing text of resource {} version {} as inline VARCHAR",
+					theEntity.getResourceId(),
+					theHistoryEntity.getVersion());
+			theHistoryEntity.setResourceTextVc(theResourceText);
+			theHistoryEntity.setResource(null);
+			theHistoryEntity.setEncoding(ResourceEncodingEnum.JSON);
+			return true;
+		}
+
+		return false;
+	}
+
+	boolean isResourceHistoryChanged(
+			ResourceHistoryTable theCurrentHistoryVersion,
+			@Nullable byte[] theResourceBinary,
+			@Nullable String resourceText) {
+		if (myIsOracleDialect) {
+			return !Arrays.equals(theCurrentHistoryVersion.getResource(), theResourceBinary);
+		}
+
+		return !StringUtils.equals(theCurrentHistoryVersion.getResourceTextVc(), resourceText);
+	}
+
+	String encodeResource(
+			IBaseResource theResource, ResourceEncodingEnum theEncoding, List<String> theExcludeElements) {
+		final IParser parser = theEncoding.newParser(myFhirContext);
+		parser.setDontEncodeElements(theExcludeElements);
+		return parser.encodeResourceToString(theResource);
+	}
+
+	/**
+	 * Helper for returning the encoded byte array of the input resource string based on the encoding.
+	 *
+	 * @param theEncoding the encoding to use
+	 * @param theEncodedResource the resource to encode
+	 * @return byte array of the resource
+	 */
+	@Nonnull
+	static byte[] getResourceBinary(ResourceEncodingEnum theEncoding, String theEncodedResource) {
+		switch (theEncoding) {
+			case JSON:
+				return theEncodedResource.getBytes(StandardCharsets.UTF_8);
+			case JSONC:
+				return GZipUtil.compress(theEncodedResource);
+			default:
+				return new byte[0];
+		}
+	}
+
+	void populateEncodedResource(
+			EncodedResource theEncodedResource,
+			String theEncodedResourceString,
+			@Nullable byte[] theResourceBinary,
+			ResourceEncodingEnum theEncoding) {
+		if (myIsOracleDialect) {
+			populateEncodedResourceInner(theEncodedResource, null, theResourceBinary, theEncoding);
+		} else {
+			populateEncodedResourceInner(theEncodedResource, theEncodedResourceString, null, ResourceEncodingEnum.JSON);
+		}
+	}
+
+	private void populateEncodedResourceInner(
+			EncodedResource encodedResource,
+			String encodedResourceString,
+			byte[] theResourceBinary,
+			ResourceEncodingEnum theEncoding) {
+		encodedResource.setResourceText(encodedResourceString);
+		encodedResource.setResourceBinary(theResourceBinary);
+		encodedResource.setEncoding(theEncoding);
+	}
+}
@@ -0,0 +1,105 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.dao;
+
+import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
+import com.google.common.hash.HashCode;
+import jakarta.annotation.Nullable;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import java.util.StringJoiner;
+
+/**
+ * POJO to contain the results of {@link ResourceHistoryCalculator#calculateResourceHistoryState(IBaseResource, ResourceEncodingEnum, List)}
+ */
+public class ResourceHistoryState {
+	@Nullable
+	private final String myResourceText;
+
+	@Nullable
+	private final byte[] myResourceBinary;
+
+	private final ResourceEncodingEnum myEncoding;
+	private final HashCode myHashCode;
+
+	public ResourceHistoryState(
+			@Nullable String theResourceText,
+			@Nullable byte[] theResourceBinary,
+			ResourceEncodingEnum theEncoding,
+			HashCode theHashCode) {
+		myResourceText = theResourceText;
+		myResourceBinary = theResourceBinary;
+		myEncoding = theEncoding;
+		myHashCode = theHashCode;
+	}
+
+	@Nullable
+	public String getResourceText() {
+		return myResourceText;
+	}
+
+	@Nullable
+	public byte[] getResourceBinary() {
+		return myResourceBinary;
+	}
+
+	public ResourceEncodingEnum getEncoding() {
+		return myEncoding;
+	}
+
+	public HashCode getHashCode() {
+		return myHashCode;
+	}
+
+	@Override
+	public boolean equals(Object theO) {
+		if (this == theO) {
+			return true;
+		}
+		if (theO == null || getClass() != theO.getClass()) {
+			return false;
+		}
+		ResourceHistoryState that = (ResourceHistoryState) theO;
+		return Objects.equals(myResourceText, that.myResourceText)
+				&& Arrays.equals(myResourceBinary, that.myResourceBinary)
+				&& myEncoding == that.myEncoding
+				&& Objects.equals(myHashCode, that.myHashCode);
+	}
+
+	@Override
+	public int hashCode() {
+		int result = Objects.hash(myResourceText, myEncoding, myHashCode);
+		result = 31 * result + Arrays.hashCode(myResourceBinary);
+		return result;
+	}
+
+	@Override
+	public String toString() {
+		return new StringJoiner(", ", ResourceHistoryState.class.getSimpleName() + "[", "]")
+				.add("myResourceText='" + myResourceText + "'")
+				.add("myResourceBinary=" + Arrays.toString(myResourceBinary))
+				.add("myEncoding=" + myEncoding)
+				.add("myHashCode=" + myHashCode)
+				.toString();
+	}
+}
@@ -477,10 +477,19 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 
 	@Override
 	public Optional<String> translatePidIdToForcedIdWithCache(JpaPid theId) {
-		return myMemoryCacheService.get(
-				MemoryCacheService.CacheEnum.PID_TO_FORCED_ID,
-				theId.getId(),
-				pid -> myResourceTableDao.findById(pid).map(ResourceTable::asTypedFhirResourceId));
+		// do getIfPresent and then put to avoid doing I/O inside the cache.
+		Optional<String> forcedId =
+				myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theId.getId());
+
+		if (forcedId == null) {
+			// This is only called when we know the resource exists.
+			// So this optional is only empty when there is no hfj_forced_id table
+			// note: this is obsolete with the new fhir_id column, and will go away.
+			forcedId = myResourceTableDao.findById(theId.getId()).map(ResourceTable::asTypedFhirResourceId);
+			myMemoryCacheService.put(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theId.getId(), forcedId);
+		}
+
+		return forcedId;
 	}
 
 	private ListMultimap<String, String> organizeIdsByResourceType(Collection<IIdType> theIds) {
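The inline comments capture the intent: the database lookup must not run inside the cache's loader, where cache-internal locks may be held. A generic, self-contained sketch of the same read-then-populate pattern (hypothetical types, not HAPI's MemoryCacheService):

    import java.util.Map;
    import java.util.Optional;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.function.Function;

    // Hypothetical illustration: the lookup is a plain read followed by a put,
    // so no I/O happens inside the map's compute machinery.
    class ForcedIdCache {
        private final Map<Long, Optional<String>> myCache = new ConcurrentHashMap<>();

        Optional<String> translate(Long thePid, Function<Long, Optional<String>> theDbLookup) {
            Optional<String> forcedId = myCache.get(thePid); // plain read, no loader
            if (forcedId == null) {
                forcedId = theDbLookup.apply(thePid); // I/O outside the cache
                myCache.put(thePid, forcedId);
            }
            return forcedId;
        }
    }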
@@ -133,10 +133,12 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 		mdmLinkTable
 				.addIndex("20230911.1", "IDX_EMPI_TGT_MR_LS")
 				.unique(false)
+				.online(true)
 				.withColumns("TARGET_TYPE", "MATCH_RESULT", "LINK_SOURCE");
 		mdmLinkTable
 				.addIndex("20230911.2", "IDX_EMPi_TGT_MR_SCore")
 				.unique(false)
+				.online(true)
 				.withColumns("TARGET_TYPE", "MATCH_RESULT", "SCORE");
 
 		// Move forced_id constraints to hfj_resource and the new fhir_id column

@@ -166,7 +168,11 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 				.withColumns("RES_TYPE", "FHIR_ID");
 
 		// For resolving references that don't supply the type.
-		hfjResource.addIndex("20231027.3", "IDX_RES_FHIR_ID").unique(false).withColumns("FHIR_ID");
+		hfjResource
+				.addIndex("20231027.3", "IDX_RES_FHIR_ID")
+				.unique(false)
+				.online(true)
+				.withColumns("FHIR_ID");
 
 		Builder.BuilderWithTableName batch2JobInstanceTable = version.onTable("BT2_JOB_INSTANCE");
 

@@ -177,25 +183,32 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 		{
 			version.executeRawSql(
 							"20231212.1",
-							"CREATE INDEX idx_sp_string_hash_nrm_pattern_ops ON public.hfj_spidx_string USING btree (hash_norm_prefix, sp_value_normalized varchar_pattern_ops, res_id, partition_id)")
+							"CREATE INDEX CONCURRENTLY idx_sp_string_hash_nrm_pattern_ops ON public.hfj_spidx_string USING btree (hash_norm_prefix, sp_value_normalized varchar_pattern_ops, res_id, partition_id)")
 					.setTransactional(false)
 					.onlyAppliesToPlatforms(DriverTypeEnum.POSTGRES_9_4)
 					.onlyIf(
 							String.format(
 									QUERY_FOR_COLUMN_COLLATION_TEMPLATE,
 									"HFJ_SPIDX_STRING".toLowerCase(),
 									"SP_VALUE_NORMALIZED".toLowerCase()),
-							"Column HFJ_SPIDX_STRING.SP_VALUE_NORMALIZED already has a collation of 'C' so doing nothing");
-
+							"Column HFJ_SPIDX_STRING.SP_VALUE_NORMALIZED already has a collation of 'C' so doing nothing")
+					.onlyIf(
+							"SELECT NOT EXISTS(select 1 from pg_indexes where indexname='idx_sp_string_hash_nrm_pattern_ops')",
+							"Index idx_sp_string_hash_nrm_pattern_ops already exists");
 			version.executeRawSql(
 							"20231212.2",
-							"CREATE UNIQUE INDEX idx_sp_uri_hash_identity_pattern_ops ON public.hfj_spidx_uri USING btree (hash_identity, sp_uri varchar_pattern_ops, res_id, partition_id)")
+							"CREATE UNIQUE INDEX CONCURRENTLY idx_sp_uri_hash_identity_pattern_ops ON public.hfj_spidx_uri USING btree (hash_identity, sp_uri varchar_pattern_ops, res_id, partition_id)")
 					.setTransactional(false)
 					.onlyAppliesToPlatforms(DriverTypeEnum.POSTGRES_9_4)
 					.onlyIf(
 							String.format(
 									QUERY_FOR_COLUMN_COLLATION_TEMPLATE,
 									"HFJ_SPIDX_URI".toLowerCase(),
 									"SP_URI".toLowerCase()),
-							"Column HFJ_SPIDX_STRING.SP_VALUE_NORMALIZED already has a collation of 'C' so doing nothing");
+							"Column HFJ_SPIDX_STRING.SP_VALUE_NORMALIZED already has a collation of 'C' so doing nothing")
+					.onlyIf(
+							"SELECT NOT EXISTS(select 1 from pg_indexes where indexname='idx_sp_uri_hash_identity_pattern_ops')",
+							"Index idx_sp_uri_hash_identity_pattern_ops already exists.");
 		}
 
 		// This fix was bad for MSSQL, it has been set to do nothing.

@@ -622,6 +635,9 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 		version.executeRawSqls("20230402.1", Map.of(DriverTypeEnum.POSTGRES_9_4, postgresTuningStatements));
 
 		// Use an unlimited length text column for RES_TEXT_VC
+		// N.B. This will FAIL SILENTLY on Oracle due to the fact that Oracle does not support an ALTER TABLE from
+		// VARCHAR to
+		// CLOB. Because of failureAllowed() this won't halt the migration
 		version.onTable("HFJ_RES_VER")
 				.modifyColumn("20230421.1", "RES_TEXT_VC")
 				.nullable()
@@ -2466,7 +2466,7 @@ public class QueryStack {
 					theRequestPartitionId,
 					andPredicates,
 					nextAnd)) {
-				break;
+				continue;
 			}
 
 			EmbeddedChainedSearchModeEnum embeddedChainedSearchModeEnum =
@@ -649,7 +649,13 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 				.getMessage(TermReadSvcImpl.class, "valueSetExpandedUsingPreExpansion", expansionTimestamp);
 		theAccumulator.addMessage(msg);
 		expandConcepts(
-				theExpansionOptions, theAccumulator, termValueSet, theFilter, theAdd, theAddedCodes, isOracleDialect());
+				theExpansionOptions,
+				theAccumulator,
+				termValueSet,
+				theFilter,
+				theAdd,
+				theAddedCodes,
+				myHibernatePropertiesProvider.isOracleDialect());
 	}
 
 	@Nonnull

@@ -664,10 +670,6 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 		return expansionTimestamp;
 	}
 
-	private boolean isOracleDialect() {
-		return myHibernatePropertiesProvider.getDialect() instanceof org.hibernate.dialect.OracleDialect;
-	}
-
 	private void expandConcepts(
 			ValueSetExpansionOptions theExpansionOptions,
 			IValueSetConceptAccumulator theAccumulator,
@ -0,0 +1,326 @@
|
||||
package ca.uhn.fhir.jpa.dao;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import com.google.common.hash.HashCode;
|
||||
import com.google.common.hash.HashFunction;
|
||||
import com.google.common.hash.Hashing;
|
||||
import org.hl7.fhir.dstu3.hapi.ctx.FhirDstu3;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.r4.hapi.ctx.FhirR4;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.Arguments;
|
||||
import org.junit.jupiter.params.provider.MethodSource;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.time.LocalDate;
|
||||
import java.time.Month;
|
||||
import java.time.ZoneId;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
class ResourceHistoryCalculatorTest {
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceHistoryCalculatorTest.class);
|
||||
|
||||
private static final FhirContext CONTEXT = FhirContext.forR4Cached();
|
||||
|
||||
private static final ResourceHistoryCalculator CALCULATOR_ORACLE = new ResourceHistoryCalculator(CONTEXT, true);
|
||||
private static final ResourceHistoryCalculator CALCULATOR_NON_ORACLE = new ResourceHistoryCalculator(CONTEXT, false);
|
||||
|
||||
private static final LocalDate TODAY = LocalDate.of(2024, Month.JANUARY, 25);
|
||||
private static final String ENCODED_RESOURCE_1 = "1234";
|
||||
private static final String ENCODED_RESOURCE_2 = "abcd";
|
||||
private static final String RESOURCE_TEXT_VC = "resourceTextVc";
|
||||
private static final List<String> EXCLUDED_ELEMENTS_1 = List.of("id");
|
||||
private static final List<String> EXCLUDED_ELEMENTS_2 = List.of("resourceType", "birthDate");
|
||||
private static final HashFunction SHA_256 = Hashing.sha256();
|
||||
|
||||
private static Stream<Arguments> calculateResourceHistoryStateArguments() {
|
||||
return Stream.of(
|
||||
Arguments.of(FhirContext.forDstu3Cached(), true, ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), false, ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), true, ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), false, ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), true, ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), false, ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), true, ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), false, ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forR4Cached(), true, ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forR4Cached(), false, ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forR4Cached(), true, ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forR4Cached(), false, ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forR4Cached(), true, ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forR4Cached(), false, ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forR4Cached(), true, ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forR4Cached(), false, ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_1)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* The purpose of this test is to ensure that the conditional logic to pre-calculate resource history text or binaries
|
||||
* is respected.
|
||||
* If this is for Oracle, the resource text will be driven off a binary with a given encoding with the
|
||||
* resource text effectively ignored.
|
||||
* If this is not Oracle, it will be driven off a JSON encoded text field with
|
||||
* the binary effectively ignored.
|
||||
*/
|
||||
@ParameterizedTest
|
||||
@MethodSource("calculateResourceHistoryStateArguments")
|
||||
void calculateResourceHistoryState(FhirContext theFhirContext, boolean theIsOracle, ResourceEncodingEnum theResourceEncoding, List<String> theExcludedElements) {
|
||||
final IBaseResource patient = getPatient(theFhirContext);
|
||||
|
||||
final ResourceHistoryCalculator calculator = getCalculator(theFhirContext, theIsOracle);
|
||||
final ResourceHistoryState result = calculator.calculateResourceHistoryState(patient, theResourceEncoding, theExcludedElements);
|
||||
|
||||
if (theIsOracle) {
|
||||
assertNotNull(result.getResourceBinary()); // On Oracle: We use the resource binary to serve up the resource content
|
||||
assertNull(result.getResourceText()); // On Oracle: We do NOT use the resource text to serve up the resource content
|
||||
assertEquals(theResourceEncoding, result.getEncoding()); // On Oracle, the resource encoding is what we used to encode the binary
|
||||
assertEquals(SHA_256.hashBytes(result.getResourceBinary()), result.getHashCode()); // On Oracle, the SHA 256 hash is of the binary
|
||||
} else {
|
||||
assertNull(result.getResourceBinary()); // Non-Oracle: We do NOT use the resource binary to serve up the resource content
|
||||
assertNotNull(result.getResourceText()); // Non-Oracle: We use the resource text to serve up the resource content
|
||||
assertEquals(ResourceEncodingEnum.JSON, result.getEncoding()); // Non-Oracle, since we didn't encode a binary this is always JSON.
|
||||
final HashCode expectedHashCode = SHA_256.hashUnencodedChars(calculator.encodeResource(patient, theResourceEncoding, theExcludedElements)); // Non-Oracle, the SHA 256 hash is of the parsed resource object
|
||||
assertEquals(expectedHashCode, result.getHashCode());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private static Stream<Arguments> conditionallyAlterHistoryEntityArguments() {
|
||||
return Stream.of(
|
||||
Arguments.of(true, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
|
||||
Arguments.of(true, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
|
||||
Arguments.of(true, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
|
||||
Arguments.of(true, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
|
||||
Arguments.of(true, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
|
||||
Arguments.of(true, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
|
||||
Arguments.of(true, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1),
|
||||
Arguments.of(true, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2),
|
||||
Arguments.of(false, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
|
||||
Arguments.of(false, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
|
||||
Arguments.of(false, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
|
||||
Arguments.of(false, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
|
||||
Arguments.of(false, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
|
||||
Arguments.of(false, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
|
||||
Arguments.of(false, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1),
|
||||
Arguments.of(false, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2)
|
||||
);
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@MethodSource("conditionallyAlterHistoryEntityArguments")
|
||||
void conditionallyAlterHistoryEntity_usesVarcharForOracle(boolean theIsOracle, ResourceEncodingEnum theResourceEncoding, String theResourceText) {
|
||||
final ResourceTable resourceTable = new ResourceTable();
|
||||
resourceTable.setId(123L);
|
||||
|
||||
final ResourceHistoryTable resourceHistoryTable = new ResourceHistoryTable();
|
||||
resourceHistoryTable.setVersion(1);
|
||||
resourceHistoryTable.setResource("resource".getBytes(StandardCharsets.UTF_8));
|
||||
resourceHistoryTable.setEncoding(theResourceEncoding);
|
||||
resourceHistoryTable.setResourceTextVc(RESOURCE_TEXT_VC);
|
||||
|
||||
final boolean isChanged =
|
||||
getCalculator(theIsOracle).conditionallyAlterHistoryEntity(resourceTable, resourceHistoryTable, theResourceText);
|
||||
|
||||
if (theIsOracle) {
|
||||
assertFalse(isChanged);
|
||||
assertNotNull(resourceHistoryTable.getResource());
|
||||
assertEquals(RESOURCE_TEXT_VC, resourceHistoryTable.getResourceTextVc());
|
||||
assertEquals(resourceHistoryTable.getEncoding(), resourceHistoryTable.getEncoding());
|
||||
} else {
|
||||
assertTrue(isChanged);
|
||||
assertNull(resourceHistoryTable.getResource());
|
||||
assertEquals(theResourceText, resourceHistoryTable.getResourceTextVc());
|
||||
assertEquals(resourceHistoryTable.getEncoding(), ResourceEncodingEnum.JSON);
|
||||
}
|
||||
}
|
||||
|
||||
private static Stream<Arguments> encodeResourceArguments() {
|
||||
return Stream.of(
|
||||
Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forDstu3Cached(), ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.JSONC, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.DEL, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.ESR, EXCLUDED_ELEMENTS_2),
|
||||
Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_1),
|
||||
Arguments.of(FhirContext.forR4Cached(), ResourceEncodingEnum.JSON, EXCLUDED_ELEMENTS_2)
|
||||
);
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@MethodSource("encodeResourceArguments")
|
||||
void encodeResource_ensureFhirVersionSpecificAndIntendedElementsExcluded(FhirContext theFhirContext, ResourceEncodingEnum theResourceEncoding, List<String> theExcludedElements) {
|
||||
final IBaseResource patient = getPatient(theFhirContext);
|
||||
final String encodedResource = getCalculator(theFhirContext, true).encodeResource(patient, theResourceEncoding, theExcludedElements);
|
||||
|
||||
final String expectedEncoding =
|
||||
theResourceEncoding.newParser(theFhirContext).setDontEncodeElements(theExcludedElements).encodeResourceToString(patient);
|
||||
|
||||
assertEquals(expectedEncoding, encodedResource);
|
||||
}
|
||||
|
||||
private static Stream<Arguments> getResourceBinaryArguments() {
|
||||
return Stream.of(
|
||||
Arguments.of(ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
|
||||
Arguments.of(ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
|
||||
Arguments.of(ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
|
||||
Arguments.of(ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
|
||||
Arguments.of(ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
|
||||
Arguments.of(ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
|
||||
Arguments.of(ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1),
|
||||
Arguments.of(ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2)
|
||||
);
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@MethodSource("getResourceBinaryArguments")
|
||||
void getResourceBinary(ResourceEncodingEnum theResourceEncoding, String theEncodedResource) {
|
||||
final byte[] resourceBinary = ResourceHistoryCalculator.getResourceBinary(theResourceEncoding, theEncodedResource);
|
||||
|
||||
switch (theResourceEncoding) {
|
||||
case JSON:
|
||||
assertArrayEquals(theEncodedResource.getBytes(StandardCharsets.UTF_8), resourceBinary);
|
||||
break;
|
||||
case JSONC:
|
||||
assertArrayEquals(GZipUtil.compress(theEncodedResource), resourceBinary);
|
||||
break;
|
||||
case DEL :
|
||||
case ESR :
|
||||
default:
|
||||
assertArrayEquals(new byte[0], resourceBinary);
|
||||
}
|
||||
|
||||
ourLog.info("resourceBinary: {}", resourceBinary);
|
||||
}
|
||||
|
||||
private static Stream<Arguments> isResourceHistoryChangedArguments() {
|
||||
return Stream.of(
|
||||
Arguments.of(true, ENCODED_RESOURCE_1.getBytes(StandardCharsets.UTF_8), ENCODED_RESOURCE_1),
|
||||
Arguments.of(false, ENCODED_RESOURCE_1.getBytes(StandardCharsets.UTF_8), ENCODED_RESOURCE_1),
|
||||
Arguments.of(true, ENCODED_RESOURCE_2.getBytes(StandardCharsets.UTF_8), ENCODED_RESOURCE_2),
|
||||
Arguments.of(false, ENCODED_RESOURCE_2.getBytes(StandardCharsets.UTF_8), ENCODED_RESOURCE_2)
|
||||
);
|
||||
}
|
||||
|
||||
@ParameterizedTest
@MethodSource("isResourceHistoryChangedArguments")
void isResourceHistoryChanged(boolean theIsOracle, byte[] theNewBinary, String theNewResourceText) {
final String existingResourceText = ENCODED_RESOURCE_1;
final byte[] existingBytes = existingResourceText.getBytes(StandardCharsets.UTF_8);

final ResourceHistoryTable resourceHistoryTable = new ResourceHistoryTable();
resourceHistoryTable.setResource(existingBytes);
resourceHistoryTable.setResourceTextVc(existingResourceText);

final boolean isChanged = getCalculator(theIsOracle).isResourceHistoryChanged(resourceHistoryTable, theNewBinary, theNewResourceText);

if (theIsOracle) {
final boolean expectedResult = !Arrays.equals(existingBytes, theNewBinary);
assertEquals(expectedResult, isChanged);
} else {
final boolean expectedResult = !existingResourceText.equals(theNewResourceText);
assertEquals(expectedResult, isChanged);
}
}

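// Editorial note (assumption, inferred from this test and the entity columns): on Oracle
// the calculator decides "changed" by comparing the legacy RES_TEXT byte[] column against
// the new binary, while on other databases it compares the RES_TEXT_VC string column
// against the new text; that is, the comparison follows whichever column the dialect
// actually persists.
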
private static Stream<Arguments> populateEncodedResourceArguments() {
return Stream.of(
Arguments.of(true, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_1),
Arguments.of(false, ResourceEncodingEnum.JSONC, ENCODED_RESOURCE_2),
Arguments.of(true, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_2),
Arguments.of(false, ResourceEncodingEnum.DEL, ENCODED_RESOURCE_1),
Arguments.of(true, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_1),
Arguments.of(false, ResourceEncodingEnum.ESR, ENCODED_RESOURCE_2),
Arguments.of(true, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_2),
Arguments.of(false, ResourceEncodingEnum.JSON, ENCODED_RESOURCE_1)
);
}

@ParameterizedTest
@MethodSource("populateEncodedResourceArguments")
void populateEncodedResource(boolean theIsOracle, ResourceEncodingEnum theResourceEncoding, String theEncodedResourceString) {
final EncodedResource encodedResource = new EncodedResource();
final byte[] resourceBinary = theEncodedResourceString.getBytes(StandardCharsets.UTF_8);

getCalculator(theIsOracle)
.populateEncodedResource(encodedResource, theEncodedResourceString, resourceBinary, theResourceEncoding);

if (theIsOracle) {
assertEquals(resourceBinary, encodedResource.getResourceBinary());
assertNull(encodedResource.getResourceText());
assertEquals(theResourceEncoding, encodedResource.getEncoding());
} else {
assertNull(encodedResource.getResourceBinary());
assertEquals(theEncodedResourceString, encodedResource.getResourceText());
assertEquals(ResourceEncodingEnum.JSON, encodedResource.getEncoding());
}
}

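// Editorial note (assumption, inferred from the else-branch above): on non-Oracle
// databases the calculator persists the resource as text and normalizes the recorded
// encoding to JSON, regardless of which encoding was requested; only the Oracle path
// keeps the requested encoding together with the binary column.
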
private ResourceHistoryCalculator getCalculator(boolean theIsOracle) {
return theIsOracle ? CALCULATOR_ORACLE : CALCULATOR_NON_ORACLE;
}

private ResourceHistoryCalculator getCalculator(FhirContext theFhirContext, boolean theIsOracle) {
return new ResourceHistoryCalculator(theFhirContext, theIsOracle);
}

private IBaseResource getPatient(FhirContext theFhirContext) {
if (theFhirContext.getVersion() instanceof FhirR4) {
return getPatientR4();
}

if (theFhirContext.getVersion() instanceof FhirDstu3) {
return getPatientDstu3();
}

return null;
}

private org.hl7.fhir.dstu3.model.Patient getPatientDstu3() {
final org.hl7.fhir.dstu3.model.Patient patient = new org.hl7.fhir.dstu3.model.Patient();

patient.setId("123");
patient.setBirthDate(Date.from(TODAY.atStartOfDay(ZoneId.of("America/Toronto")).toInstant()));

return patient;
}

private Patient getPatientR4() {
final Patient patient = new Patient();

patient.setId("123");
patient.setBirthDate(Date.from(TODAY.atStartOfDay(ZoneId.of("America/Toronto")).toInstant()));

return patient;
}
}

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -32,8 +32,6 @@ import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;

import static org.apache.commons.lang3.StringUtils.defaultString;

@Entity
@Table(
name = ResourceHistoryTable.HFJ_RES_VER,
@ -86,15 +84,12 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
@OneToMany(mappedBy = "myResourceHistory", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
private Collection<ResourceHistoryTag> myTags;

/**
 * Note: No setter for this field because it's only a legacy way of storing data now.
 */
@Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = true)
@Lob()
@OptimisticLock(excluded = true)
private byte[] myResource;

@Column(name = "RES_TEXT_VC", nullable = true, length = Length.LONG32)
@Column(name = "RES_TEXT_VC", length = Length.LONG32, nullable = true)
@OptimisticLock(excluded = true)
private String myResourceTextVc;

@ -155,8 +150,7 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
}

public void setResourceTextVc(String theResourceTextVc) {
myResource = null;
myResourceTextVc = defaultString(theResourceTextVc);
myResourceTextVc = theResourceTextVc;
}

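/*
 * Editorial note on the change above (an assumption drawn only from this diff): the
 * setter previously cleared the legacy RES_TEXT LOB (myResource = null) and coerced a
 * null input to "" via defaultString(); it now stores the given value as-is and leaves
 * myResource untouched. Callers that relied on the setter to blank the legacy column
 * presumably need to clear it explicitly via the new setResource(null).
 */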
public ResourceHistoryProvenanceEntity getProvenance() {
@ -212,6 +206,10 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
return myResource;
}

public void setResource(byte[] theResource) {
myResource = theResource;
}

@Override
public Long getResourceId() {
return myResourceId;

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -1,11 +1,16 @@
package ca.uhn.fhir.validator;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.primitive.CodeDt;
import ca.uhn.fhir.util.AttachmentUtil;
import org.hl7.fhir.instance.model.api.ICompositeType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.Attachment;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;

public class AttachmentUtilTest {

@ -53,4 +58,18 @@ public class AttachmentUtilTest {
String encoded = ctx.newJsonParser().encodeResourceToString(communication);
assertEquals("{\"resourceType\":\"Communication\",\"payload\":[{\"contentAttachment\":{\"contentType\":\"text/plain\",\"data\":\"AAECAw==\",\"url\":\"http://foo\",\"size\":123}}]}", encoded);
}

@Test
public void testGetOrCreateContentTypeOnEmptyAttachmentR4(){
FhirContext ctx = FhirContext.forR4Cached();
Attachment attachment = (Attachment) AttachmentUtil.newInstance(ctx);

assertNull(attachment.getContentType());

IPrimitiveType<CodeDt> contentType = AttachmentUtil.getOrCreateContentType(ctx, attachment);

contentType.setValueAsString("text/plain");

assertNotNull(attachment.getContentType());
}

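// Editorial note (sketch, not part of the change set): with contentType now created as a
// FHIR "code" rather than a "string", callers can keep using setValueAsString(), e.g.
//
//   IPrimitiveType<CodeDt> contentType = AttachmentUtil.getOrCreateContentType(ctx, attachment);
//   contentType.setValueAsString("application/fhir+json"); // any MIME type code
//
// The assertion style above is unchanged; only the underlying primitive type differs.
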
}

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -87,6 +87,7 @@ import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
@ -299,9 +300,9 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
assertThat(result.getRequiresAccessToken(), is(equalTo(true)));
assertThat(result.getTransactionTime(), is(notNullValue()));
assertEquals(result.getOutput().size(), 3);
assertEquals(1, result.getOutput().stream().filter(o -> o.getType().equals("Patient")).collect(Collectors.toList()).size());
assertEquals(1, result.getOutput().stream().filter(o -> o.getType().equals("Observation")).collect(Collectors.toList()).size());
assertEquals(1, result.getOutput().stream().filter(o -> o.getType().equals("Encounter")).collect(Collectors.toList()).size());
assertEquals(1, result.getOutput().stream().filter(o -> o.getType().equals("Patient")).count());
assertEquals(1, result.getOutput().stream().filter(o -> o.getType().equals("Observation")).count());
assertEquals(1, result.getOutput().stream().filter(o -> o.getType().equals("Encounter")).count());

//We assert specifically on content as the deserialized version will "helpfully" fill in missing fields.
assertThat(responseContent, containsString("\"error\" : [ ]"));
@ -338,8 +339,8 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
assertThat(result.getRequiresAccessToken(), is(equalTo(true)));
assertThat(result.getTransactionTime(), is(notNullValue()));
assertEquals(result.getOutput().size(), 1);
assertEquals(1, result.getOutput().stream().filter(o -> o.getType().equals("Patient")).collect(Collectors.toList()).size());
assertEquals(0, result.getOutput().stream().filter(o -> o.getType().equals("Binary")).collect(Collectors.toList()).size());
assertEquals(1, result.getOutput().stream().filter(o -> o.getType().equals("Patient")).count());
assertEquals(0, result.getOutput().stream().filter(o -> o.getType().equals("Binary")).count());

//We assert specifically on content as the deserialized version will "helpfully" fill in missing fields.
assertThat(responseContent, containsString("\"error\" : [ ]"));
@ -381,7 +382,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
}

HashSet<String> types = Sets.newHashSet("Patient");
BulkExportJobResults bulkExportJobResults = startSystemBulkExportJobAndAwaitCompletion(types, new HashSet<String>());
BulkExportJobResults bulkExportJobResults = startSystemBulkExportJobAndAwaitCompletion(types, new HashSet<>());
Map<String, List<String>> resourceTypeToBinaryIds = bulkExportJobResults.getResourceTypeToBinaryIds();
assertThat(resourceTypeToBinaryIds.get("Patient"), hasSize(1));
String patientBinaryId = resourceTypeToBinaryIds.get("Patient").get(0);
@ -477,7 +478,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
String entities = myJobInstanceRepository
.findAll()
.stream()
.map(t -> t.toString())
.map(Batch2JobInstanceEntity::toString)
.collect(Collectors.joining("\n * "));
ourLog.info("Entities:\n * " + entities);
});
@ -492,6 +493,41 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
assertEquals(patientCount, jobInstance.getCombinedRecordsProcessed());
}

@Test
public void testEmptyExport() {
BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Collections.singleton("Patient"));
options.setFilters(Collections.emptySet());
options.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);

JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
startRequest.setParameters(options);
Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(mySrd, startRequest);

assertNotNull(startResponse);

final String jobId = startResponse.getInstanceId();

// Run a scheduled pass to build the export
myBatch2JobHelper.awaitJobCompletion(startResponse.getInstanceId());
runInTransaction(() -> {
String entities = myJobInstanceRepository
.findAll()
.stream()
.map(Batch2JobInstanceEntity::toString)
.collect(Collectors.joining("\n * "));
ourLog.info("Entities:\n * " + entities);
});

final Optional<JobInstance> optJobInstance = myJobPersistence.fetchInstance(jobId);
assertNotNull(optJobInstance);
assertTrue(optJobInstance.isPresent());
assertThat(optJobInstance.get().getReport(),
containsString("Export complete, but no data to generate report for job instance:"));
}

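// Editorial note (assumption, mirroring the assertion above): when an export matches no
// resources, the job still reaches COMPLETED and its report is expected to begin with
// "Export complete, but no data to generate report for job instance:", rather than the
// job failing or producing empty Binary outputs.
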
private void logContentTypeAndResponse(Header[] headers, String response) {
ourLog.info("**************************");
ourLog.info("Content-Type is: {}", headers[0]);
@ -542,7 +578,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {

// test
HashSet<String> types = Sets.newHashSet("Patient", "Observation");
BulkExportJobResults bulkExportJobResults = startPatientBulkExportJobAndAwaitResults(types, new HashSet<String>(), "ha");
BulkExportJobResults bulkExportJobResults = startPatientBulkExportJobAndAwaitResults(types, new HashSet<>(), "ha");
Map<String, List<IBaseResource>> typeToResources = convertJobResultsToResources(bulkExportJobResults);
assertThat(typeToResources.get("Patient"), hasSize(1));
assertThat(typeToResources.get("Observation"), hasSize(1));
@ -605,6 +641,34 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
assertTrue(patientIds.contains(resourceId));
}
}

@Test
public void testExportEmptyResult() {
BulkExportJobParameters options = new BulkExportJobParameters();
options.setResourceTypes(Sets.newHashSet("Patient"));
options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);

JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
startRequest.setParameters(options);
Batch2JobStartResponse job = myJobCoordinator.startInstance(mySrd, startRequest);
myBatch2JobHelper.awaitJobCompletion(job.getInstanceId(), 60);
ourLog.debug("Job status after awaiting - {}", myJobCoordinator.getInstance(job.getInstanceId()).getStatus());
await()
.atMost(300, TimeUnit.SECONDS)
.until(() -> {
StatusEnum status = myJobCoordinator.getInstance(job.getInstanceId()).getStatus();
if (!StatusEnum.COMPLETED.equals(status)) {
fail("Job status was changed from COMPLETED to " + status);
}
return myJobCoordinator.getInstance(job.getInstanceId()).getReport() != null;
});

String report = myJobCoordinator.getInstance(job.getInstanceId()).getReport();
assertThat(report,
containsString("Export complete, but no data to generate report for job instance:"));
}
}

@ -1081,7 +1145,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
}
}
]
}
}
""";
Bundle bundle = parser.parseResource(Bundle.class, bundleStr);
myClient.transaction().withBundle(bundle).execute();
@ -1218,6 +1282,21 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
assertThat(typeToContents.get("Patient"), not(containsString("POG2")));
}

@Test
public void testExportEmptyResult() {
Group group = new Group();
group.setId("Group/G-empty");
group.setActive(true);
myClient.update().resource(group).execute();

HashSet<String> resourceTypes = Sets.newHashSet("Patient");
BulkExportJobResults bulkExportJobResults = startGroupBulkExportJobAndAwaitCompletion(
resourceTypes, new HashSet<>(), "G-empty");

assertThat(bulkExportJobResults.getReportMsg(),
startsWith("Export complete, but no data to generate report for job instance:"));
}

@Test
public void testGroupBulkExportMultipleResourceTypes() {
Patient patient = new Patient();
@ -1398,7 +1477,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
Map<String, List<IBaseResource>> convertJobResultsToResources(BulkExportJobResults theResults) {
Map<String, String> stringStringMap = convertJobResultsToStringContents(theResults);
Map<String, List<IBaseResource>> typeToResources = new HashMap<>();
stringStringMap.entrySet().forEach(entry -> typeToResources.put(entry.getKey(), convertNDJSONToResources(entry.getValue())));
stringStringMap.forEach((key, value) -> typeToResources.put(key, convertNDJSONToResources(value)));
return typeToResources;
}

@ -1412,8 +1491,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
private String getBinaryContentsAsString(String theBinaryId) {
Binary binary = myBinaryDao.read(new IdType(theBinaryId));
assertEquals(Constants.CT_FHIR_NDJSON, binary.getContentType());
String contents = new String(binary.getContent(), Constants.CHARSET_UTF8);
return contents;
return new String(binary.getContent(), Constants.CHARSET_UTF8);
}

BulkExportJobResults startGroupBulkExportJobAndAwaitCompletion(HashSet<String> theResourceTypes, HashSet<String> theFilters, String theGroupId) {
@ -1509,8 +1587,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
await().atMost(300, TimeUnit.SECONDS).until(() -> myJobCoordinator.getInstance(jobInstanceId).getReport() != null);

String report = myJobCoordinator.getInstance(jobInstanceId).getReport();
BulkExportJobResults results = JsonUtil.deserialize(report, BulkExportJobResults.class);
return results;
return JsonUtil.deserialize(report, BulkExportJobResults.class);
}

private void verifyBulkExportResults(String theGroupId, HashSet<String> theFilters, List<String> theContainedList, List<String> theExcludedList) {
@ -11,14 +11,18 @@ import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.util.SqlQuery;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.AuditEvent;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Composition;
import org.hl7.fhir.r4.model.Device;
import org.hl7.fhir.r4.model.DomainResource;
import org.hl7.fhir.r4.model.Encounter;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Location;
import org.hl7.fhir.r4.model.MessageHeader;
@ -27,22 +31,26 @@ import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Quantity;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.SearchParameter;
import org.hl7.fhir.r4.model.StringType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.springframework.beans.factory.annotation.Autowired;

import java.io.IOException;
import java.sql.Date;
import java.util.ArrayList;
import java.util.List;

import static org.apache.commons.lang3.StringUtils.countMatches;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.in;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.fail;

@ -1573,6 +1581,76 @@ public class ChainingR4SearchTest extends BaseJpaR4Test {
countUnionStatementsInGeneratedQuery("/Observation?subject:Location.name=Smith", 1);
}

@ParameterizedTest
@CsvSource({
// search url, expected count
"/Bundle?composition.patient.identifier=system|value-1&composition.patient.birthdate=1980-01-01, 1", // correct identifier, correct birthdate
"/Bundle?composition.patient.birthdate=1980-01-01&composition.patient.identifier=system|value-1, 1", // correct birthdate, correct identifier
"/Bundle?composition.patient.identifier=system|value-1&composition.patient.birthdate=2000-01-01, 0", // correct identifier, incorrect birthdate
"/Bundle?composition.patient.birthdate=2000-01-01&composition.patient.identifier=system|value-1, 0", // incorrect birthdate, correct identifier
"/Bundle?composition.patient.identifier=system|value-2&composition.patient.birthdate=1980-01-01, 0", // incorrect identifier, correct birthdate
"/Bundle?composition.patient.birthdate=1980-01-01&composition.patient.identifier=system|value-2, 0", // correct birthdate, incorrect identifier
"/Bundle?composition.patient.identifier=system|value-2&composition.patient.birthdate=2000-01-01, 0", // incorrect identifier, incorrect birthdate
"/Bundle?composition.patient.birthdate=2000-01-01&composition.patient.identifier=system|value-2, 0", // incorrect birthdate, incorrect identifier
})
public void testMultipleChainedBundleCompositionSearchParameters(String theSearchUrl, int theExpectedCount) {
createSearchParameter("bundle-composition-patient-birthdate",
"composition.patient.birthdate",
"Bundle",
"Bundle.entry.resource.ofType(Patient).birthDate",
Enumerations.SearchParamType.DATE
);

createSearchParameter("bundle-composition-patient-identifier",
"composition.patient.identifier",
"Bundle",
"Bundle.entry.resource.ofType(Patient).identifier",
Enumerations.SearchParamType.TOKEN
);

createDocumentBundleWithPatientDetails("1980-01-01", "system", "value-1");

SearchParameterMap params = myMatchUrlService.getResourceSearch(theSearchUrl).getSearchParameterMap().setLoadSynchronous(true);
assertSearchReturns(myBundleDao, params, theExpectedCount);
}

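// Editorial note (assumption, summarizing the expectations above): the two chained
// parameters combine with AND semantics, so a Bundle only matches when BOTH chains
// resolve against the Patient contained in the document; parameter order in the URL
// must not affect the result, which is why each pair is tested in both orders.
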
private void createSearchParameter(String theId, String theCode, String theBase, String theExpression, Enumerations.SearchParamType theType) {
SearchParameter searchParameter = new SearchParameter();
searchParameter.setId(theId);
searchParameter.setCode(theCode);
searchParameter.setName(theCode);
searchParameter.setUrl("http://example.org/SearchParameter/" + theId);
searchParameter.setStatus(Enumerations.PublicationStatus.ACTIVE);
searchParameter.addBase(theBase);
searchParameter.setType(theType);
searchParameter.setExpression(theExpression);
searchParameter = (SearchParameter) mySearchParameterDao.update(searchParameter, mySrd).getResource();
mySearchParamRegistry.forceRefresh();
assertNotNull(mySearchParamRegistry.getActiveSearchParam(theBase, searchParameter.getName()));
}

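// Editorial note (assumption): creating or updating a custom SearchParameter does not
// make it usable immediately; the forceRefresh() call above reloads the search-param
// registry so the new parameter is active before the test issues its queries.
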
private void createDocumentBundleWithPatientDetails(String theBirthDate, String theIdentifierSystem, String theIdentifierValue) {
Patient patient = new Patient();
patient.setBirthDate(Date.valueOf(theBirthDate));
patient.addIdentifier().setSystem(theIdentifierSystem).setValue(theIdentifierValue);
patient = (Patient) myPatientDao.create(patient, mySrd).getResource();
assertSearchReturns(myPatientDao, SearchParameterMap.newSynchronous(), 1);

Bundle bundle = new Bundle();
bundle.setType(Bundle.BundleType.DOCUMENT);
Composition composition = new Composition();
composition.setType(new CodeableConcept().addCoding(new Coding().setCode("code").setSystem("http://example.org")));
bundle.addEntry().setResource(composition);
composition.getSubject().setReference(patient.getIdElement().getValue());
bundle.addEntry().setResource(patient);
myBundleDao.create(bundle, mySrd);
assertSearchReturns(myBundleDao, SearchParameterMap.newSynchronous(), 1);
}

private void assertSearchReturns(IFhirResourceDao<?> theDao, SearchParameterMap theSearchParams, int theExpectedCount){
assertEquals(theExpectedCount, theDao.search(theSearchParams, mySrd).size());
}

private void countUnionStatementsInGeneratedQuery(String theUrl, int theExpectedNumberOfUnions) throws IOException {
myCaptureQueriesListener.clear();
searchAndReturnUnqualifiedVersionlessIdValues(theUrl);

@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
@ -29,6 +30,7 @@ import ca.uhn.fhir.util.ClasspathUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.exparity.hamcrest.date.DateMatchers;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
@ -39,6 +41,7 @@ import org.hl7.fhir.r4.model.DecimalType;
import org.hl7.fhir.r4.model.Encounter;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Identifier;
import org.hl7.fhir.r4.model.InstantType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Organization;
@ -48,8 +51,12 @@ import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.SampledData;
import org.hl7.fhir.r4.model.SearchParameter;
import org.hl7.fhir.r4.model.StructureDefinition;
import org.hl7.fhir.r4.model.Task;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.PageRequest;
@ -60,6 +67,7 @@ import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@ -77,6 +85,7 @@ import static org.hamcrest.Matchers.matchesPattern;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

@ -1224,4 +1233,116 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
assertEquals(1, ids.size());
}

@Nested
class ConditionalCreates {
private static final String SYSTEM = "http://tempuri.org";
private static final String VALUE_1 = "1";
private static final String VALUE_2 = "2";

private final Task myTask1 = new Task()
.setStatus(Task.TaskStatus.DRAFT)
.setIntent(Task.TaskIntent.UNKNOWN)
.addIdentifier(new Identifier()
.setSystem(SYSTEM)
.setValue(VALUE_1));

private final Task myTask2 = new Task()
.setStatus(Task.TaskStatus.DRAFT)
.setIntent(Task.TaskIntent.UNKNOWN)
.addIdentifier(new Identifier()
.setSystem(SYSTEM)
.setValue(VALUE_2))
.addBasedOn(new Reference().setReference("urn:uuid:59cda086-4763-4ef0-8e36-8c90058686ea"));

@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testConditionalCreateDependsOnPOSTedResource(boolean theHasQuestionMark) {
final IFhirResourceDao<Task> taskDao = getTaskDao();
taskDao.create(myTask1, new SystemRequestDetails());

final List<Task> allTasksPreBundle = searchAllTasks();
assertEquals(1, allTasksPreBundle.size());
final Task taskPreBundle = allTasksPreBundle.get(0);
assertEquals(VALUE_1, taskPreBundle.getIdentifier().get(0).getValue());
assertEquals(SYSTEM, taskPreBundle.getIdentifier().get(0).getSystem());

final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext);

final String entryConditionalTemplate = "%sidentifier=http://tempuri.org|1";
final String matchUrl = String.format(entryConditionalTemplate, theHasQuestionMark ? "?" : "");

bundleBuilder.addTransactionCreateEntry(myTask2)
.conditional(matchUrl);

final List<Bundle.BundleEntryComponent> responseEntries = sendBundleAndGetResponse(bundleBuilder.getBundle());

assertEquals(1, responseEntries.size());

final Bundle.BundleEntryComponent bundleEntry = responseEntries.get(0);

assertEquals("200 OK", bundleEntry.getResponse().getStatus());

final List<Task> allTasksPostBundle = searchAllTasks();
assertEquals(1, allTasksPostBundle.size());
final Task taskPostBundle = allTasksPostBundle.get(0);
assertEquals(VALUE_1, taskPostBundle.getIdentifier().get(0).getValue());
assertEquals(SYSTEM, taskPostBundle.getIdentifier().get(0).getSystem());
}

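// Editorial note (assumption, mirroring the parameterized match URLs above): the server
// treats a conditional create URL identically with or without a leading '?', e.g.
// "?identifier=http://tempuri.org|1" and "identifier=http://tempuri.org|1" both match
// the pre-existing Task, so the entry answers 200 OK and no duplicate Task is created.
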
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testConditionalCreateDependsOnFirstEntryExisting(boolean theHasQuestionMark) {
final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext);

bundleBuilder.addTransactionCreateEntry(myTask1, "urn:uuid:59cda086-4763-4ef0-8e36-8c90058686ea")
.conditional("identifier=http://tempuri.org|1");

final String secondEntryConditionalTemplate = "%sidentifier=http://tempuri.org|2&based-on=urn:uuid:59cda086-4763-4ef0-8e36-8c90058686ea";
final String secondMatchUrl = String.format(secondEntryConditionalTemplate, theHasQuestionMark ? "?" : "");

bundleBuilder.addTransactionCreateEntry(myTask2)
.conditional(secondMatchUrl);

final IBaseBundle requestBundle = bundleBuilder.getBundle();
assertTrue(requestBundle instanceof Bundle);

final List<Bundle.BundleEntryComponent> responseEntries = sendBundleAndGetResponse(requestBundle);

assertEquals(2, responseEntries.size());
assertEquals(Set.of("201 Created"), responseEntries.stream().map(Bundle.BundleEntryComponent::getResponse).map(Bundle.BundleEntryResponseComponent::getStatus).collect(Collectors.toUnmodifiableSet()));

final List<Task> allTasksPostBundle = searchAllTasks();
assertEquals(2, allTasksPostBundle.size());
final Task taskPostBundle1 = allTasksPostBundle.get(0);
assertEquals(VALUE_1, taskPostBundle1.getIdentifier().get(0).getValue());
assertEquals(SYSTEM, taskPostBundle1.getIdentifier().get(0).getSystem());
final Task taskPostBundle2 = allTasksPostBundle.get(1);
assertEquals(VALUE_2, taskPostBundle2.getIdentifier().get(0).getValue());
assertEquals(SYSTEM, taskPostBundle2.getIdentifier().get(0).getSystem());

final List<Reference> task2BasedOn = taskPostBundle2.getBasedOn();
assertEquals(1, task2BasedOn.size());
final Reference task2BasedOnReference = task2BasedOn.get(0);
assertEquals(taskPostBundle1.getIdElement().toUnqualifiedVersionless().asStringValue(), task2BasedOnReference.getReference());
}
}

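// Editorial sketch (assumption, generalizing the test above): passing an explicit
// fullUrl to addTransactionCreateEntry() lets other entries refer to that entry by a
// urn:uuid placeholder, which the server rewrites to the real resource id on commit:
//
//   bundleBuilder.addTransactionCreateEntry(task1, "urn:uuid:59cda086-4763-4ef0-8e36-8c90058686ea")
//       .conditional("identifier=http://tempuri.org|1");
//   // task2.basedOn references the same urn:uuid and is resolved server-side
//   bundleBuilder.addTransactionCreateEntry(task2).conditional("identifier=http://tempuri.org|2");
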
private List<Bundle.BundleEntryComponent> sendBundleAndGetResponse(IBaseBundle theRequestBundle) {
assertTrue(theRequestBundle instanceof Bundle);

return mySystemDao.transaction(new SystemRequestDetails(), (Bundle) theRequestBundle).getEntry();
}

private List<Task> searchAllTasks() {
return unsafeCast(getTaskDao().search(SearchParameterMap.newSynchronous(), new SystemRequestDetails()).getAllResources());
}

private IFhirResourceDao<Task> getTaskDao() {
return unsafeCast(myDaoRegistry.getResourceDao("Task"));
}

@SuppressWarnings("unchecked")
private static <T> T unsafeCast(Object theObject) {
return (T) theObject;
}
}

@ -3379,7 +3379,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
myCaptureQueriesListener.logInsertQueries();
assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(7, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(6, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
@ -3462,7 +3462,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(6, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(5, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

}

@ -380,17 +380,11 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
observation.getEncounter().setReference(encounter.getId()); // not versioned
builder.addTransactionCreateEntry(observation);

Bundle outcome = mySystemDao.transaction(mySrd, (Bundle) builder.getBundle());
ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
assertEquals("200 OK", outcome.getEntry().get(0).getResponse().getStatus());
assertEquals("200 OK", outcome.getEntry().get(1).getResponse().getStatus());
assertEquals("201 Created", outcome.getEntry().get(2).getResponse().getStatus());
Bundle outcome = createAndValidateBundle((Bundle) builder.getBundle(),
List.of("200 OK", "200 OK", "201 Created"), List.of("2", "1", "1"));
IdType patientId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
IdType encounterId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());
IdType observationId = new IdType(outcome.getEntry().get(2).getResponse().getLocation());
assertEquals("2", patientId.getVersionIdPart());
assertEquals("1", encounterId.getVersionIdPart());
assertEquals("1", observationId.getVersionIdPart());

// Read back and verify that reference is now versioned
observation = myObservationDao.read(observationId);
@ -429,14 +423,10 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
builder.addTransactionCreateEntry(observation);

myCaptureQueriesListener.clear();
Bundle outcome = mySystemDao.transaction(mySrd, (Bundle) builder.getBundle());
ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
assertEquals("200 OK", outcome.getEntry().get(0).getResponse().getStatus());
assertEquals("201 Created", outcome.getEntry().get(1).getResponse().getStatus());
Bundle outcome = createAndValidateBundle((Bundle) builder.getBundle(),
List.of("200 OK", "201 Created"), List.of("3", "1"));
IdType patientId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
IdType observationId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());
assertEquals("3", patientId.getVersionIdPart());
assertEquals("1", observationId.getVersionIdPart());

// Make sure we're not introducing any extra DB operations
assertEquals(3, myCaptureQueriesListener.logSelectQueries().size());
@ -468,14 +458,10 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {

myCaptureQueriesListener.clear();

Bundle outcome = mySystemDao.transaction(mySrd, (Bundle) builder.getBundle());
ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
assertEquals("200 OK", outcome.getEntry().get(0).getResponse().getStatus());
assertEquals("201 Created", outcome.getEntry().get(1).getResponse().getStatus());
Bundle outcome = createAndValidateBundle((Bundle) builder.getBundle(),
List.of("200 OK", "201 Created"), List.of("3", "1"));
IdType patientId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
IdType observationId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());
assertEquals("3", patientId.getVersionIdPart());
assertEquals("1", observationId.getVersionIdPart());

// Make sure we're not introducing any extra DB operations
assertEquals(4, myCaptureQueriesListener.logSelectQueries().size());
@ -563,20 +549,91 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
BundleBuilder builder = new BundleBuilder(myFhirContext);
builder.addTransactionCreateEntry(messageHeader);

ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(builder.getBundle()));
Bundle outcome = mySystemDao.transaction(mySrd, (Bundle) builder.getBundle());
ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
assertEquals("201 Created", outcome.getEntry().get(0).getResponse().getStatus());

Bundle outcome = createAndValidateBundle((Bundle) builder.getBundle(),
List.of("201 Created"), List.of("1"));
IdType messageHeaderId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
assertEquals("2", patient.getIdElement().getVersionIdPart());
assertEquals("1", messageHeaderId.getVersionIdPart());

// read back and verify that reference is versioned
messageHeader = myMessageHeaderDao.read(messageHeaderId);
assertEquals(patient.getIdElement().getValue(), messageHeader.getFocus().get(0).getReference());
}

@Test
@DisplayName("#5619 Incorrect version of auto versioned reference for conditional update with urn id placeholder")
public void testInsertVersionedReferencesByPath_conditionalUpdateNoOpInTransaction_addsCorrectVersionToReference() {
Supplier<Bundle> supplier = () -> {
// create patient
Patient patient = new Patient();
patient.setActive(true);
patient.addIdentifier().setSystem("http://example.com").setValue("test");

// add patient to the Bundle - conditional update with placeholder url
Bundle bundle = new Bundle();
bundle.setType(Bundle.BundleType.TRANSACTION);
bundle.addEntry()
.setResource(patient)
.setFullUrl("urn:uuid:00001")
.getRequest()
.setMethod(Bundle.HTTPVerb.PUT)
.setUrl("Patient?identifier=http://example.com|test");

// create MessageHeader
MessageHeader messageHeader = new MessageHeader();
messageHeader.getMeta().setExtension(messageHeaderAutoVersionExtension);
// add reference
messageHeader.addFocus().setReference("urn:uuid:00001");

bundle.addEntry()
.setResource(messageHeader)
.getRequest()
.setMethod(Bundle.HTTPVerb.POST)
.setUrl("/MessageHeader");

return bundle;
};

// create bundle first time
Bundle outcome = createAndValidateBundle(supplier.get(),
List.of("201 Created", "201 Created"), List.of("1", "1"));
IdType patientId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
IdType messageHeaderId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());

// read back and verify that reference is versioned and correct
Patient patient = myPatientDao.read(patientId);
MessageHeader messageHeader = myMessageHeaderDao.read(messageHeaderId);
assertEquals(patient.getIdElement().getValue(), messageHeader.getFocus().get(0).getReference());

// create bundle second time
outcome = createAndValidateBundle(supplier.get(), List.of("200 OK", "201 Created"), List.of("1", "1"));
patientId = new IdType(outcome.getEntry().get(0).getResponse().getLocation());
messageHeaderId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());

// read back and verify that reference is versioned and correct
patient = myPatientDao.read(patientId);
messageHeader = myMessageHeaderDao.read(messageHeaderId);
assertEquals(patient.getIdElement().getValue(), messageHeader.getFocus().get(0).getReference());
}

private Bundle createAndValidateBundle(Bundle theBundle, List<String> theOutcomeStatuses,
List<String> theOutcomeVersions) {
assertEquals(theBundle.getEntry().size(), theOutcomeStatuses.size(),
"Size of OutcomeStatuses list is incorrect");
assertEquals(theBundle.getEntry().size(), theOutcomeVersions.size(),
"Size of OutcomeVersions list is incorrect");

ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(theBundle));
Bundle result = mySystemDao.transaction(mySrd, theBundle);
ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(result));

for (int i = 0; i < result.getEntry().size(); i++) {
assertEquals(theOutcomeStatuses.get(i), result.getEntry().get(i).getResponse().getStatus());
IIdType resultId = new IdType(result.getEntry().get(i).getResponse().getLocation());
assertEquals(theOutcomeVersions.get(i), resultId.getVersionIdPart());
}
return result;
}

private Patient createAndUpdatePatient(String thePatientId) {
Patient patient = new Patient();
patient.setId(thePatientId);

@ -1,7 +1,6 @@
package ca.uhn.fhir.jpa.provider.r4;

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
import ca.uhn.fhir.jpa.delete.ThreadSafeResourceDeleterSvc;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
@ -14,6 +13,7 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.client.interceptor.SimpleRequestHeaderInterceptor;
import ca.uhn.fhir.rest.server.exceptions.AuthenticationException;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
@ -37,11 +37,13 @@ import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Composition;
import org.hl7.fhir.r4.model.Condition;
import org.hl7.fhir.r4.model.Encounter;
import org.hl7.fhir.r4.model.ExplanationOfBenefit;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Identifier;
import org.hl7.fhir.r4.model.MessageHeader;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Observation.ObservationStatus;
import org.hl7.fhir.r4.model.Organization;
@ -49,11 +51,14 @@ import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Practitioner;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.Resource;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.ValueSet;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -68,7 +73,9 @@ import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

public class AuthorizationInterceptorJpaR4Test extends BaseResourceProviderR4Test {
@ -79,6 +86,8 @@ public class AuthorizationInterceptorJpaR4Test extends BaseResourceProviderR4Tes
private SearchParamMatcher mySearchParamMatcher;
@Autowired
private ThreadSafeResourceDeleterSvc myThreadSafeResourceDeleterSvc;
private AuthorizationInterceptor myReadAllBundleInterceptor;
private AuthorizationInterceptor myReadAllPatientInterceptor;

@BeforeEach
@Override
@ -87,7 +96,8 @@ public class AuthorizationInterceptorJpaR4Test extends BaseResourceProviderR4Tes
myStorageSettings.setAllowMultipleDelete(true);
myStorageSettings.setExpungeEnabled(true);
myStorageSettings.setDeleteExpungeEnabled(true);
myServer.getRestfulServer().registerInterceptor(new BulkDataExportProvider());
myReadAllBundleInterceptor = new ReadAllAuthorizationInterceptor("Bundle");
myReadAllPatientInterceptor = new ReadAllAuthorizationInterceptor("Patient");
}

@Override
@ -1506,4 +1516,250 @@ public class AuthorizationInterceptorJpaR4Test extends BaseResourceProviderR4Tes
}

}

@Test
public void testSearchBundles_withPermissionToSearchAllBundles_doesNotReturn403ForbiddenForDocumentBundles(){
myServer.getRestfulServer().registerInterceptor(myReadAllBundleInterceptor);

Bundle bundle1 = createDocumentBundle(createPatient("John", "Smith"));
Bundle bundle2 = createDocumentBundle(createPatient("Jane", "Doe"));
assertSearchContainsResources("/Bundle", bundle1, bundle2);
}

@Test
public void testSearchBundles_withPermissionToSearchAllBundles_doesNotReturn403ForbiddenForCollectionBundles(){
myServer.getRestfulServer().registerInterceptor(myReadAllBundleInterceptor);

Bundle bundle1 = createCollectionBundle(createPatient("John", "Smith"));
Bundle bundle2 = createCollectionBundle(createPatient("Jane", "Doe"));
assertSearchContainsResources("/Bundle", bundle1, bundle2);
}

@Test
public void testSearchBundles_withPermissionToSearchAllBundles_doesNotReturn403ForbiddenForMessageBundles(){
myServer.getRestfulServer().registerInterceptor(myReadAllBundleInterceptor);

Bundle bundle1 = createMessageHeaderBundle(createPatient("John", "Smith"));
Bundle bundle2 = createMessageHeaderBundle(createPatient("Jane", "Doe"));
assertSearchContainsResources("/Bundle", bundle1, bundle2);
}

@Test
public void testSearchBundles_withPermissionToViewOneBundle_onlyAllowsViewingOneBundle(){
Bundle bundle1 = createMessageHeaderBundle(createPatient("John", "Smith"));
Bundle bundle2 = createMessageHeaderBundle(createPatient("Jane", "Doe"));

myServer.getRestfulServer().getInterceptorService().registerInterceptor(
new ReadInCompartmentAuthorizationInterceptor("Bundle", bundle1.getIdElement())
);

assertSearchContainsResources("/Bundle?_id=" + bundle1.getIdPart(), bundle1);
assertSearchFailsWith403Forbidden("/Bundle?_id=" + bundle2.getIdPart());
assertSearchFailsWith403Forbidden("/Bundle");
}

@Test
public void testSearchPatients_withPermissionToSearchAllBundles_returns403Forbidden(){
myServer.getRestfulServer().registerInterceptor(myReadAllBundleInterceptor);

createPatient("John", "Smith");
createPatient("Jane", "Doe");
assertSearchFailsWith403Forbidden("/Patient");
}

@Test
public void testSearchPatients_withPermissionToSearchAllPatients_returnsAllPatients(){
myServer.getRestfulServer().registerInterceptor(myReadAllPatientInterceptor);

Patient patient1 = createPatient("John", "Smith");
Patient patient2 = createPatient("Jane", "Doe");
assertSearchContainsResources("/Patient", patient1, patient2);
}

@Test
public void testSearchPatients_withPermissionToViewOnePatient_onlyAllowsViewingOnePatient(){
Patient patient1 = createPatient("John", "Smith");
Patient patient2 = createPatient("Jane", "Doe");

myServer.getRestfulServer().getInterceptorService().registerInterceptor(
new ReadInCompartmentAuthorizationInterceptor("Patient", patient1.getIdElement())
);

assertSearchContainsResources("/Patient?_id=" + patient1.getIdPart(), patient1);
assertSearchFailsWith403Forbidden("/Patient?_id=" + patient2.getIdPart());
assertSearchFailsWith403Forbidden("/Patient");
}

@Test
public void testToListOfResourcesAndExcludeContainer_withSearchSetContainingDocumentBundles_onlyRecursesOneLevelDeep() {
Bundle bundle1 = createDocumentBundle(createPatient("John", "Smith"));
Bundle bundle2 = createDocumentBundle(createPatient("John", "Smith"));
Bundle searchSet = createSearchSet(bundle1, bundle2);

RequestDetails requestDetails = new SystemRequestDetails();
requestDetails.setResourceName("Bundle");

List<IBaseResource> resources = AuthorizationInterceptor.toListOfResourcesAndExcludeContainer(requestDetails, searchSet, myFhirContext);
assertEquals(2, resources.size());
assertTrue(resources.contains(bundle1));
assertTrue(resources.contains(bundle2));
}

@Test
public void testToListOfResourcesAndExcludeContainer_withSearchSetContainingPatients_returnsPatients() {
Patient patient1 = createPatient("John", "Smith");
Patient patient2 = createPatient("Jane", "Doe");
Bundle searchSet = createSearchSet(patient1, patient2);

RequestDetails requestDetails = new SystemRequestDetails();
requestDetails.setResourceName("Patient");

List<IBaseResource> resources = AuthorizationInterceptor.toListOfResourcesAndExcludeContainer(requestDetails, searchSet, myFhirContext);
assertEquals(2, resources.size());
assertTrue(resources.contains(patient1));
assertTrue(resources.contains(patient2));
}

@ParameterizedTest
@EnumSource(value = Bundle.BundleType.class, names = {"DOCUMENT", "COLLECTION", "MESSAGE"})
public void testShouldExamineBundleResources_withBundleRequestAndStandAloneBundleType_returnsFalse(Bundle.BundleType theBundleType){
RequestDetails requestDetails = new SystemRequestDetails();
requestDetails.setResourceName("Bundle");
Bundle bundle = new Bundle();
bundle.setType(theBundleType);

assertFalse(AuthorizationInterceptor.shouldExamineBundleChildResources(requestDetails, myFhirContext, bundle));
}

@ParameterizedTest
@EnumSource(value = Bundle.BundleType.class, names = {"DOCUMENT", "COLLECTION", "MESSAGE"}, mode = EnumSource.Mode.EXCLUDE)
public void testShouldExamineBundleResources_withBundleRequestAndNonStandAloneBundleType_returnsTrue(Bundle.BundleType theBundleType){
RequestDetails requestDetails = new SystemRequestDetails();
requestDetails.setResourceName("Bundle");
Bundle bundle = new Bundle();
bundle.setType(theBundleType);

assertTrue(AuthorizationInterceptor.shouldExamineBundleChildResources(requestDetails, myFhirContext, bundle));
}

@ParameterizedTest
@EnumSource(value = Bundle.BundleType.class)
public void testShouldExamineBundleResources_withNonBundleRequests_returnsTrue(Bundle.BundleType theBundleType){
RequestDetails requestDetails = new SystemRequestDetails();
requestDetails.setResourceName("Patient");
Bundle bundle = new Bundle();
bundle.setType(theBundleType);

assertTrue(AuthorizationInterceptor.shouldExamineBundleChildResources(requestDetails, myFhirContext, bundle));
}

private Patient createPatient(String theFirstName, String theLastName){
|
||||
Patient patient = new Patient();
|
||||
patient.addName().addGiven(theFirstName).setFamily(theLastName);
|
||||
return (Patient) myPatientDao.create(patient, mySrd).getResource();
|
||||
}
|
||||
|
||||
private Bundle createDocumentBundle(Patient thePatient){
|
||||
Composition composition = new Composition();
|
||||
composition.setType(new CodeableConcept().addCoding(new Coding().setSystem("http://example.org").setCode("some-type")));
|
||||
composition.getSubject().setReference(thePatient.getIdElement().getValue());
|
||||
|
||||
Bundle bundle = new Bundle();
|
||||
bundle.setType(Bundle.BundleType.DOCUMENT);
|
||||
bundle.addEntry().setResource(composition);
|
||||
bundle.addEntry().setResource(thePatient);
|
||||
return (Bundle) myBundleDao.create(bundle, mySrd).getResource();
|
||||
}
|
||||
|
||||
private Bundle createCollectionBundle(Patient thePatient) {
|
||||
Bundle bundle = new Bundle();
|
||||
bundle.setType(Bundle.BundleType.COLLECTION);
|
||||
bundle.addEntry().setResource(thePatient);
|
||||
return (Bundle) myBundleDao.create(bundle, mySrd).getResource();
|
||||
}
|
||||
|
||||
private Bundle createMessageHeaderBundle(Patient thePatient) {
|
||||
Bundle bundle = new Bundle();
|
||||
bundle.setType(Bundle.BundleType.MESSAGE);
|
||||
|
||||
MessageHeader messageHeader = new MessageHeader();
|
||||
Coding event = new Coding().setSystem("http://acme.com").setCode("some-event");
|
||||
messageHeader.setEvent(event);
|
||||
messageHeader.getFocusFirstRep().setReference(thePatient.getIdElement().getValue());
|
||||
bundle.addEntry().setResource(messageHeader);
|
||||
bundle.addEntry().setResource(thePatient);
|
||||
|
||||
return (Bundle) myBundleDao.create(bundle, mySrd).getResource();
|
||||
}
|
||||
|
||||
private void assertSearchContainsResources(String theUrl, Resource... theExpectedResources){
|
||||
List<String> expectedIds = Arrays.stream(theExpectedResources)
|
||||
.map(resource -> resource.getIdPart())
|
||||
.toList();
|
||||
|
||||
Bundle searchResult = myClient
|
||||
.search()
|
||||
.byUrl(theUrl)
|
||||
.returnBundle(Bundle.class)
|
||||
.execute();
|
||||
|
||||
List<String> actualIds = searchResult.getEntry().stream()
|
||||
.map(entry -> entry.getResource().getIdPart())
|
||||
.toList();
|
||||
|
||||
assertEquals(expectedIds.size(), actualIds.size());
|
||||
assertTrue(expectedIds.containsAll(actualIds));
|
||||
}
|
||||
|
||||
private void assertSearchFailsWith403Forbidden(String theUrl){
|
||||
try {
|
||||
myClient.search().byUrl(theUrl).execute();
|
||||
fail();
|
||||
} catch (Exception e){
|
||||
assertTrue(e.getMessage().contains("HTTP 403 Forbidden"));
|
||||
}
|
||||
}
|
||||
|
||||
private Bundle createSearchSet(Resource... theResources){
|
||||
Bundle bundle = new Bundle();
|
||||
bundle.setType(Bundle.BundleType.SEARCHSET);
|
||||
Arrays.stream(theResources).forEach(resource -> bundle.addEntry().setResource(resource));
|
||||
return bundle;
|
||||
}
|
||||
|
||||
static class ReadAllAuthorizationInterceptor extends AuthorizationInterceptor {
|
||||
|
||||
private final String myResourceType;
|
||||
|
||||
public ReadAllAuthorizationInterceptor(String theResourceType){
|
||||
super(PolicyEnum.DENY);
|
||||
myResourceType = theResourceType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
|
||||
return new RuleBuilder()
|
||||
.allow().read().resourcesOfType(myResourceType).withAnyId().andThen()
|
||||
.build();
|
||||
}
|
||||
}
|
||||
|
||||
static class ReadInCompartmentAuthorizationInterceptor extends AuthorizationInterceptor {
|
||||
|
||||
private final String myResourceType;
|
||||
private final IIdType myId;
|
||||
|
||||
public ReadInCompartmentAuthorizationInterceptor(String theResourceType, IIdType theId){
|
||||
super(PolicyEnum.DENY);
|
||||
myResourceType = theResourceType;
|
||||
myId = theId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
|
||||
return new RuleBuilder()
|
||||
.allow().read().allResources().inCompartment(myResourceType, myId).andThen()
|
||||
.build();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
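The two nested interceptors above are the reusable pieces of this test: a deny-by-default policy plus a single RuleBuilder rule. A minimal wiring sketch, assuming an existing RestfulServer instance and patient ID (the wrapper class is hypothetical; ReadInCompartmentAuthorizationInterceptor is the nested class defined above):

import ca.uhn.fhir.rest.server.RestfulServer;
import org.hl7.fhir.instance.model.api.IIdType;

class CompartmentAuthWiringSketch {
    // Deny everything except reads within the given patient's compartment.
    static void restrictToPatientCompartment(RestfulServer theServer, IIdType thePatientId) {
        theServer.getInterceptorService().registerInterceptor(
            new ReadInCompartmentAuthorizationInterceptor("Patient", thePatientId));
    }
}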
@ -17,8 +17,10 @@ import ca.uhn.fhir.jpa.cache.ResourceChangeListenerCacheRefresherImpl;
import ca.uhn.fhir.jpa.cache.ResourceChangeListenerRegistryImpl;
import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
import ca.uhn.fhir.jpa.cache.ResourceVersionMap;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.JpaResourceDao;
import ca.uhn.fhir.jpa.dao.ResourceHistoryCalculator;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.index.DaoSearchParamSynchronizer;
@ -148,6 +150,7 @@ public class GiantTransactionPerfTest {
    private IIdHelperService myIdHelperService;
    @Mock
    private IJpaStorageResourceParser myJpaStorageResourceParser;
    private final ResourceHistoryCalculator myResourceHistoryCalculator = new ResourceHistoryCalculator(FhirContext.forR4Cached(), false);
    private IMetaTagSorter myMetaTagSorter;

    @AfterEach
@ -271,6 +274,7 @@ public class GiantTransactionPerfTest {
        myEobDao.setJpaStorageResourceParserForUnitTest(myJpaStorageResourceParser);
        myEobDao.setExternallyStoredResourceServiceRegistryForUnitTest(new ExternallyStoredResourceServiceRegistry());
        myEobDao.setMyMetaTagSorter(myMetaTagSorter);
        myEobDao.setResourceHistoryCalculator(myResourceHistoryCalculator);
        myEobDao.start();

        myDaoRegistry.setResourceDaos(Lists.newArrayList(myEobDao));
@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -0,0 +1,135 @@
package ca.uhn.fhir.jpa.auth;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor;
import ca.uhn.fhir.rest.server.interceptor.auth.IAuthRule;
import ca.uhn.fhir.rest.server.interceptor.auth.IRuleApplier;
import ca.uhn.fhir.rest.server.interceptor.auth.PolicyEnum;
import ca.uhn.fhir.rest.server.interceptor.auth.RuleBuilder;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.StringType;
import org.junit.jupiter.api.Test;

import java.util.HashSet;
import java.util.List;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;

public class RuleImplOpTest {
    private static final String OPERATION = "operation";
    private static final String TYPE = "type";
    private static final String PATH = "path";
    private static final String VALUE = "value";
    private static final String REPLACE = "replace";
    private static final String PATIENT_BIRTH_DATE = "Patient.birthDate";
    private static final Parameters PARAMETERS = buildParameters();
    private static final String DOCUMENT = "document";
    private static final String ERROR_TEMPLATE = "HAPI-0339: Can not handle transaction with nested resource of type %s";
    private static final String ERROR_PARAMETERS = String.format(ERROR_TEMPLATE, "Parameters");
    private static final String ERROR_BUNDLE = String.format(ERROR_TEMPLATE, "Bundle");

    private static final String REQUEST_RULELIST = AuthorizationInterceptor.class.getName() + "_1_RULELIST";
    private final Patient myPatient = buildPatient();

    private final List<IAuthRule> myRules = new RuleBuilder()
        .allow()
        .transaction()
        .withAnyOperation()
        .andApplyNormalRules()
        .andThen()
        .allow()
        .write()
        .allResources()
        .withAnyId()
        .build();

    private final IAuthRule myRule = myRules.get(0);
    private final FhirContext myFhirContext = FhirContext.forR4Cached();
    private final IBaseBundle myInnerBundle = buildInnerBundler(myFhirContext);

    private final RequestDetails mySystemRequestDetails = buildSystemRequestDetails(myFhirContext, myRules);
    private final IRuleApplier myRuleApplier = new AuthorizationInterceptor();

    @Test
    void testTransactionBundleUpdateWithParameters() {
        final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext);
        bundleBuilder.addTransactionUpdateEntry(PARAMETERS);

        try {
            applyRule(bundleBuilder.getBundle());
            fail("Expected an InvalidRequestException");
        } catch (InvalidRequestException exception) {
            assertEquals(ERROR_PARAMETERS, exception.getMessage());
        }
    }

    @Test
    void testTransactionBundleWithNestedBundle() {
        final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext);
        bundleBuilder.addTransactionCreateEntry(myInnerBundle);

        try {
            applyRule(bundleBuilder.getBundle());
            fail("Expected an InvalidRequestException");
        } catch (InvalidRequestException exception) {
            assertEquals(ERROR_BUNDLE, exception.getMessage());
        }
    }

    @Test
    void testTransactionBundlePatchWithParameters() {
        final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext);
        bundleBuilder.addTransactionFhirPatchEntry(myPatient.getIdElement(), PARAMETERS);

        final AuthorizationInterceptor.Verdict verdict = applyRule(bundleBuilder.getBundle());

        assertThat(verdict.getDecision(), equalTo(PolicyEnum.ALLOW));
    }

    private AuthorizationInterceptor.Verdict applyRule(IBaseBundle theBundle) {
        return myRule.applyRule(RestOperationTypeEnum.TRANSACTION, mySystemRequestDetails, theBundle, myPatient.getIdElement(), myPatient, myRuleApplier, new HashSet<>(), null);
    }

    private static Parameters buildParameters() {
        final Parameters patch = new Parameters();

        final Parameters.ParametersParameterComponent op = patch.addParameter().setName(OPERATION);
        op.addPart().setName(TYPE).setValue(new CodeType(REPLACE));
        op.addPart().setName(PATH).setValue(new CodeType(PATIENT_BIRTH_DATE));
        op.addPart().setName(VALUE).setValue(new StringType("1912-04-14"));

        return patch;
    }

    private static RequestDetails buildSystemRequestDetails(FhirContext theFhirContext, List<IAuthRule> theRules) {
        final SystemRequestDetails systemRequestDetails = new SystemRequestDetails();
        systemRequestDetails.setFhirContext(theFhirContext);
        systemRequestDetails.getUserData().put(REQUEST_RULELIST, theRules);

        return systemRequestDetails;
    }

    private static Patient buildPatient() {
        final Patient patient = new Patient();
        patient.setId(new IdType("Patient", "1"));
        return patient;
    }

    private static IBaseBundle buildInnerBundler(FhirContext theFhirContext) {
        final BundleBuilder innerBundleBuilder = new BundleBuilder(theFhirContext);
        innerBundleBuilder.setType(DOCUMENT);
        return innerBundleBuilder.getBundle();
    }
}
@ -18,7 +18,8 @@ import java.util.Set;
import java.util.function.Consumer;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.isOneOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.oneOf;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;

@ -98,10 +99,10 @@ public class PartitionRunnerTest {
        getPartitionRunner(5).runInPartitionedThreads(resourceIds, partitionConsumer);
        List<HookParams> calls = myLatch.awaitExpected();
        PartitionCall partitionCall1 = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 0);
        assertThat(partitionCall1.threadName, isOneOf(TEST_THREADNAME_1, TEST_THREADNAME_2));
        assertThat(partitionCall1.threadName, is(oneOf(TEST_THREADNAME_1, TEST_THREADNAME_2)));
        assertEquals(5, partitionCall1.size);
        PartitionCall partitionCall2 = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 1);
        assertThat(partitionCall2.threadName, isOneOf(TEST_THREADNAME_1, TEST_THREADNAME_2));
        assertThat(partitionCall2.threadName, is(oneOf(TEST_THREADNAME_1, TEST_THREADNAME_2)));
        assertEquals(5, partitionCall2.size);
        assertNotEquals(partitionCall1.threadName, partitionCall2.threadName);
    }
@ -119,14 +120,38 @@ public class PartitionRunnerTest {
        getPartitionRunner(5).runInPartitionedThreads(resourceIds, partitionConsumer);
        List<HookParams> calls = myLatch.awaitExpected();
        PartitionCall partitionCall1 = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 0);
        assertThat(partitionCall1.threadName, isOneOf(TEST_THREADNAME_1, TEST_THREADNAME_2));
        assertThat(partitionCall1.threadName, is(oneOf(TEST_THREADNAME_1, TEST_THREADNAME_2)));
        assertEquals(true, nums.remove(partitionCall1.size));
        PartitionCall partitionCall2 = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, 1);
        assertThat(partitionCall2.threadName, isOneOf(TEST_THREADNAME_1, TEST_THREADNAME_2));
        assertThat(partitionCall2.threadName, is(oneOf(TEST_THREADNAME_1, TEST_THREADNAME_2)));
        assertEquals(true, nums.remove(partitionCall2.size));
        assertNotEquals(partitionCall1.threadName, partitionCall2.threadName);
    }

    /**
     * See #5636 $expunge operation ignoring ExpungeThreadCount setting in certain cases
     */
    @Test
    public void testExpunge_withTasksSizeBiggerThanExecutorQueue_usesConfiguredNumberOfThreads() throws InterruptedException {
        // setup
        List<IResourcePersistentId> resourceIds = buildPidList(2500);
        Consumer<List<IResourcePersistentId>> partitionConsumer = buildPartitionConsumer(myLatch);
        // with batch size = 2 we expect 2500/2 runnableTasks to be created
        myLatch.setExpectedCount(1250);

        // execute
        getPartitionRunner(2, 2).runInPartitionedThreads(resourceIds, partitionConsumer);
        List<HookParams> calls = myLatch.awaitExpected();

        // validate - only two threads should be used for execution
        for (int i = 0; i < 1250; i++) {
            PartitionCall partitionCall = (PartitionCall) PointcutLatch.getLatchInvocationParameter(calls, i);
            assertThat(partitionCall.threadName, is(oneOf(TEST_THREADNAME_1, TEST_THREADNAME_2)));
        }
    }

    @Test
    public void tenItemsOneThread() throws InterruptedException {
        List<IResourcePersistentId> resourceIds = buildPidList(10);
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../pom.xml</relativePath>
</parent>
@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -25,12 +25,14 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.model.valueset.BundleTypeEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
import ca.uhn.fhir.rest.server.interceptor.consent.ConsentInterceptor;
import ca.uhn.fhir.util.BundleUtil;
import com.google.common.collect.Lists;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
@ -78,8 +80,12 @@ public class AuthorizationInterceptor implements IRuleApplier {

    public static final String REQUEST_ATTRIBUTE_BULK_DATA_EXPORT_OPTIONS =
            AuthorizationInterceptor.class.getName() + "_BulkDataExportOptions";
    public static final String BUNDLE = "Bundle";
    private static final AtomicInteger ourInstanceCount = new AtomicInteger(0);
    private static final Logger ourLog = LoggerFactory.getLogger(AuthorizationInterceptor.class);
    private static final Set<BundleTypeEnum> STANDALONE_BUNDLE_RESOURCE_TYPES =
            Set.of(BundleTypeEnum.DOCUMENT, BundleTypeEnum.COLLECTION, BundleTypeEnum.MESSAGE);

    private final int myInstanceIndex = ourInstanceCount.incrementAndGet();
    private final String myRequestSeenResourcesKey =
            AuthorizationInterceptor.class.getName() + "_" + myInstanceIndex + "_SEENRESOURCES";
@ -525,7 +531,7 @@ public class AuthorizationInterceptor implements IRuleApplier {
            case EXTENDED_OPERATION_TYPE:
            case EXTENDED_OPERATION_INSTANCE: {
                if (theResponseObject != null) {
                    resources = toListOfResourcesAndExcludeContainer(theResponseObject, fhirContext);
                    resources = toListOfResourcesAndExcludeContainer(theRequestDetails, theResponseObject, fhirContext);
                }
                break;
            }
@ -572,22 +578,23 @@ public class AuthorizationInterceptor implements IRuleApplier {
        OUT,
    }

    static List<IBaseResource> toListOfResourcesAndExcludeContainer(
            IBaseResource theResponseObject, FhirContext fhirContext) {
    public static List<IBaseResource> toListOfResourcesAndExcludeContainer(
            RequestDetails theRequestDetails, IBaseResource theResponseObject, FhirContext fhirContext) {
        if (theResponseObject == null) {
            return Collections.emptyList();
        }

        List<IBaseResource> retVal;

        boolean isContainer = false;
        boolean shouldExamineChildResources = false;
        if (theResponseObject instanceof IBaseBundle) {
            isContainer = true;
            IBaseBundle bundle = (IBaseBundle) theResponseObject;
            shouldExamineChildResources = shouldExamineBundleChildResources(theRequestDetails, fhirContext, bundle);
        } else if (theResponseObject instanceof IBaseParameters) {
            isContainer = true;
            shouldExamineChildResources = true;
        }

        if (!isContainer) {
        if (!shouldExamineChildResources) {
            return Collections.singletonList(theResponseObject);
        }

@ -604,6 +611,26 @@ public class AuthorizationInterceptor implements IRuleApplier {
        return retVal;
    }

    /**
     * This method determines if the given Bundle should have permissions applied to the resources inside or
     * to the Bundle itself.
     *
     * This distinction is important in Bundle requests where a user has permissions to view all Bundles. In
     * this scenario we want to apply permissions to the Bundle itself and not the resources inside if
     * the Bundle is of type document, collection, or message.
     */
    public static boolean shouldExamineBundleChildResources(
            RequestDetails theRequestDetails, FhirContext theFhirContext, IBaseBundle theBundle) {
        boolean isBundleRequest = theRequestDetails != null && BUNDLE.equals(theRequestDetails.getResourceName());
        if (!isBundleRequest) {
            return true;
        }
        BundleTypeEnum bundleType = BundleUtil.getBundleTypeEnum(theFhirContext, theBundle);
        boolean isStandaloneBundleResource =
                bundleType != null && STANDALONE_BUNDLE_RESOURCE_TYPES.contains(bundleType);
        return !isStandaloneBundleResource;
    }
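
A minimal sketch of the contract implemented above, using only APIs that appear in this change (the wrapper class is hypothetical; the expected values mirror the parameterized tests earlier in this diff):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor;
import org.hl7.fhir.r4.model.Bundle;

class ShouldExamineSketch {
    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forR4Cached();
        SystemRequestDetails request = new SystemRequestDetails();
        request.setResourceName("Bundle");

        Bundle document = new Bundle();
        document.setType(Bundle.BundleType.DOCUMENT);

        // Bundle request + document/collection/message bundle: authorize the Bundle itself.
        System.out.println(AuthorizationInterceptor.shouldExamineBundleChildResources(request, ctx, document)); // false

        // Any non-Bundle request: child resources are examined individually.
        request.setResourceName("Patient");
        System.out.println(AuthorizationInterceptor.shouldExamineBundleChildResources(request, ctx, document)); // true
    }
}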

    public static class Verdict {

        private final IAuthRule myDecidingRule;
@ -63,6 +63,8 @@ import static org.hl7.fhir.instance.model.api.IAnyResource.SP_RES_ID;
@SuppressWarnings("EnumSwitchStatementWhichMissesCases")
class RuleImplOp extends BaseRule /* implements IAuthRule */ {
    private static final Logger ourLog = LoggerFactory.getLogger(RuleImplOp.class);
    private static final String PARAMETERS = "Parameters";
    private static final String BUNDLE = "Bundle";

    private AppliesTypeEnum myAppliesTo;
    private Set<String> myAppliesToTypes;
@ -771,7 +773,10 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {
         */
        if (nextPart.getResource() != null) {
            RuntimeResourceDefinition resourceDef = ctx.getResourceDefinition(nextPart.getResource());
            if ("Parameters".equals(resourceDef.getName()) || "Bundle".equals(resourceDef.getName())) {

            // TODO: LD: We should pursue a more ideal fix after the release to inspect the bundle more deeply
            // to ensure that it's a valid request
            if (shouldRejectBundleEntry(resourceDef, operation)) {
                throw new InvalidRequestException(Msg.code(339)
                        + "Can not handle transaction with nested resource of type " + resourceDef.getName());
            }
@ -812,7 +817,7 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {
        } else if (theOutputResource != null) {

            List<IBaseResource> outputResources = AuthorizationInterceptor.toListOfResourcesAndExcludeContainer(
                    theOutputResource, theRequestDetails.getFhirContext());
                    theRequestDetails, theOutputResource, theRequestDetails.getFhirContext());

            Verdict verdict = null;
            for (IBaseResource nextResource : outputResources) {
@ -835,6 +840,24 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {
        }
    }

    /**
     * Ascertains whether this transaction request contains nested operations or nested transactions.
     * This is done carefully because a bundle can contain a nested PATCH with Parameters, which is supported,
     * but a non-PATCH nested Parameters resource may be problematic.
     *
     * @param theResourceDef The {@link RuntimeResourceDefinition} associated with this bundle entry
     * @param theOperation The {@link RestOperationTypeEnum} associated with this bundle entry
     * @return true if we should reject this bundle entry
     */
    private boolean shouldRejectBundleEntry(
            RuntimeResourceDefinition theResourceDef, RestOperationTypeEnum theOperation) {
        final boolean isResourceParameters = PARAMETERS.equals(theResourceDef.getName());
        final boolean isResourceBundle = BUNDLE.equals(theResourceDef.getName());
        final boolean isOperationPatch = theOperation == RestOperationTypeEnum.PATCH;

        return (isResourceParameters && !isOperationPatch) || isResourceBundle;
    }

    private void setTargetFromResourceId(RequestDetails theRequestDetails, FhirContext ctx, RuleTarget target) {
        String[] idValues = theRequestDetails.getParameters().get(SP_RES_ID);
        target.resourceIds = new ArrayList<>();
@ -909,7 +932,7 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ {

    private boolean requestAppliesToTransaction(
            FhirContext theContext, RuleOpEnum theOp, IBaseResource theInputResource) {
        if (!"Bundle".equals(theContext.getResourceType(theInputResource))) {
        if (!BUNDLE.equals(theContext.getResourceType(theInputResource))) {
            return false;
        }

@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../pom.xml</relativePath>
</parent>
@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../pom.xml</relativePath>
</parent>
@ -21,7 +21,7 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-caching-api</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

</dependency>
<dependency>
@ -44,6 +44,9 @@ public class CacheProvider<K, V> implements ca.uhn.fhir.sl.cache.CacheProvider<K
    public Cache<K, V> create(long timeoutMillis, long maximumSize) {
        return new CacheDelegator<K, V>(Caffeine.newBuilder()
            .expireAfterWrite(timeoutMillis, TimeUnit.MILLISECONDS)
            // Caffeine locks the whole array when growing the hash table.
            // Set initial capacity to max to avoid this. All our caches are <1M entries.
            .initialCapacity((int) maximumSize)
            .maximumSize(maximumSize)
            .build());
    }
@ -51,6 +54,9 @@ public class CacheProvider<K, V> implements ca.uhn.fhir.sl.cache.CacheProvider<K
    public LoadingCache<K, V> create(long timeoutMillis, long maximumSize, CacheLoader<K, V> loading) {
        return new LoadingCacheDelegator<K, V>(Caffeine.newBuilder()
            .expireAfterWrite(timeoutMillis, TimeUnit.MILLISECONDS)
            // Caffeine locks the whole array when growing the hash table.
            // Set initial capacity to max to avoid this. All our caches are <1M entries.
            .initialCapacity((int) maximumSize)
            .maximumSize(maximumSize)
            .build(loading::load));
    }
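
A minimal usage sketch for this provider (the wrapper class and keys are illustrative, and it assumes the ca.uhn.fhir.sl.cache facade mirrors Caffeine's put/getIfPresent, as the delegators above suggest):

import ca.uhn.fhir.sl.cache.Cache;

class CacheProviderSketch {
    public static void main(String[] theArgs) {
        CacheProvider<String, String> provider = new CacheProvider<>();
        // Expire entries 60s after write; the cache is pre-sized to maximumSize so
        // Caffeine never has to lock the whole table to grow it (the rationale above).
        Cache<String, String> cache = provider.create(60_000, 10_000);
        cache.put("ValueSet/example", "cached");
        System.out.println(cache.getIfPresent("ValueSet/example"));
    }
}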
@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../pom.xml</relativePath>
</parent>
@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../../pom.xml</relativePath>
</parent>
@ -5,7 +5,7 @@
<parent>
<artifactId>hapi-deployable-pom</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>
</parent>

<artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId>
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

</parent>
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

</parent>
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

</parent>
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../pom.xml</relativePath>
</parent>
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -597,11 +597,11 @@ public class Builder {
                        "Only SELECT statements (including CTEs) are allowed here. Please check your SQL: [%s]",
                        theSql));
            }
            ourLog.info("SQL to evaluate: {}", theSql);
            ourLog.debug("SQL to evaluate: {}", theSql);

            myTask.addPrecondition(new ExecuteTaskPrecondition(
                    () -> {
                        ourLog.info("Checking precondition for SQL: {}", theSql);
                        ourLog.debug("Checking precondition for SQL: {}", theSql);
                        return MigrationJdbcUtils.queryForSingleBooleanResultMultipleThrowsException(
                                theSql, myTask.newJdbcTemplate());
                    },
@ -614,6 +614,11 @@ public class Builder {
            myTask.setRunDuringSchemaInitialization(true);
            return this;
        }

        public BuilderCompleteTask setTransactional(boolean theFlag) {
            myTask.setTransactional(theFlag);
            return this;
        }
    }

    public class BuilderAddTableRawSql {
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -502,6 +502,9 @@ public class BulkDataExportProvider {

        String serverBase = getServerBase(theRequestDetails);

        // an output element is required, even if empty, according to the HL7 FHIR IG;
        // calling the getter initializes the (possibly empty) list so it is always serialized
        bulkResponseDocument.getOutput();

        for (Map.Entry<String, List<String>> entrySet :
                results.getResourceTypeToBinaryIds().entrySet()) {
            String resourceType = entrySet.getKey();
@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.1.0-SNAPSHOT</version>
<version>7.0.0-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
Some files were not shown because too many files have changed in this diff.