Merge branch 'master' into mb-fix-history-prefetch

# Conflicts:
#	hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java
This commit is contained in:
Michael Buckley 2024-04-26 15:38:35 -04:00
commit 410aeb6232
306 changed files with 8059 additions and 1608 deletions

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId> <artifactId>hapi-fhir</artifactId>
<version>7.1.7-SNAPSHOT</version> <version>7.3.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>7.1.7-SNAPSHOT</version> <version>7.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>7.1.7-SNAPSHOT</version> <version>7.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>
@ -138,6 +138,11 @@
<artifactId>system-stubs-jupiter</artifactId> <artifactId>system-stubs-jupiter</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<!-- OpenTelemetry -->
<dependency>
<groupId>io.opentelemetry</groupId>
<artifactId>opentelemetry-api</artifactId>
</dependency>
</dependencies> </dependencies>
<build> <build>

View File

@ -1080,10 +1080,52 @@ public enum Pointcut implements IPointcut {
SUBSCRIPTION_TOPIC_AFTER_PERSISTED_RESOURCE_CHECKED( SUBSCRIPTION_TOPIC_AFTER_PERSISTED_RESOURCE_CHECKED(
void.class, "ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage"), void.class, "ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage"),
/**
* <b>Storage Hook:</b>
* Invoked when a Bulk Export job is being kicked off, but before any permission checks
* have been done.
* This hook can be used to modify or update parameters as needed before
* authorization/permission checks are done.
* <p>
* Hooks may accept the following parameters:
* </p>
* <ul>
* <li>
* ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters - The details of the job being kicked off
* </li>
* <li>
* ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
* pulled out of the servlet request. Note that the bean
* properties are not all guaranteed to be populated, depending on how early during processing the
* exception occurred. <b>Note that this parameter may be null in contexts where the request is not
* known, such as while processing searches</b>
* </li>
* <li>
* ca.uhn.fhir.rest.server.servlet.ServletRequestDetails - A bean containing details about the request that is about to be processed, including details such as the
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
* pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will
* only be populated when operating in a RestfulServer implementation. It is provided as a convenience.
* </li>
* </ul>
* <p>
* Hooks should return <code>void</code>, and can throw exceptions.
* </p>
*/
STORAGE_PRE_INITIATE_BULK_EXPORT(
void.class,
"ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters",
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"),
/** /**
* <b>Storage Hook:</b> * <b>Storage Hook:</b>
* Invoked when a Bulk Export job is being kicked off. Hook methods may modify * Invoked when a Bulk Export job is being kicked off. Hook methods may modify
* the request, or raise an exception to prevent it from being initiated. * the request, or raise an exception to prevent it from being initiated.
*
* This hook is not guaranteed to be called before permission checks, and so
* any implementers should be cautious of changing the options in ways that would
* affect permissions.
* <p> * <p>
* Hooks may accept the following parameters: * Hooks may accept the following parameters:
* </p> * </p>
@ -2909,7 +2951,9 @@ public enum Pointcut implements IPointcut {
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
"ca.uhn.fhir.jpa.util.SqlQueryList"), "ca.uhn.fhir.jpa.util.SqlQueryList"),
@Deprecated(since = "7.2.0 - Use STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX instead.")
/** /**
* <b> Deprecated but still supported. Will eventually be removed. Please use <code>Pointcut.STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX</code> </b>
* <b> Binary Blob Prefix Assigning Hook:</b> * <b> Binary Blob Prefix Assigning Hook:</b>
* <p> * <p>
* Immediately before a binary blob is stored to its eventual data sink, this hook is called. * Immediately before a binary blob is stored to its eventual data sink, this hook is called.
@ -2935,6 +2979,32 @@ public enum Pointcut implements IPointcut {
"ca.uhn.fhir.rest.api.server.RequestDetails", "ca.uhn.fhir.rest.api.server.RequestDetails",
"org.hl7.fhir.instance.model.api.IBaseResource"), "org.hl7.fhir.instance.model.api.IBaseResource"),
/**
* <b> Binary Content Prefix Assigning Hook:</b>
* <p>
* Immediately before binary content is stored to its eventual data sink, this hook is called.
* This hook allows implementers to provide a prefix to the binary content's ID.
* This is helpful in cases where you want to identify this content for later retrieval outside of HAPI-FHIR. Note that allowable characters will depend on the specific storage sink being used.
* <ul>
* <li>
* ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
* pulled out of the servlet request. Note that the bean
* properties are not all guaranteed to be populated.
* </li>
* <li>
* org.hl7.fhir.instance.model.api.IBaseBinary - The binary resource that is about to be stored.
* </li>
* </ul>
* <p>
* Hooks should return <code>String</code>, which represents the full prefix to be applied to the binary content ID.
* </p>
*/
STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX(
String.class,
"ca.uhn.fhir.rest.api.server.RequestDetails",
"org.hl7.fhir.instance.model.api.IBaseResource"),
/** /**
* <b>Storage Hook:</b> * <b>Storage Hook:</b>
* Invoked before a batch job is persisted to the database. * Invoked before a batch job is persisted to the database.

View File

@ -31,6 +31,8 @@ import ca.uhn.fhir.util.ReflectionUtil;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap; import com.google.common.collect.ListMultimap;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.instrumentation.annotations.WithSpan;
import jakarta.annotation.Nonnull; import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable; import jakarta.annotation.Nullable;
import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.Validate;
@ -547,7 +549,7 @@ public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & I
// Invoke the method // Invoke the method
try { try {
return myMethod.invoke(getInterceptor(), args); return invokeMethod(args);
} catch (InvocationTargetException e) { } catch (InvocationTargetException e) {
Throwable targetException = e.getTargetException(); Throwable targetException = e.getTargetException();
if (myPointcut.isShouldLogAndSwallowException(targetException)) { if (myPointcut.isShouldLogAndSwallowException(targetException)) {
@ -566,6 +568,19 @@ public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & I
throw new InternalErrorException(Msg.code(1911) + e); throw new InternalErrorException(Msg.code(1911) + e);
} }
} }
@WithSpan("hapifhir.interceptor")
private Object invokeMethod(Object[] args) throws InvocationTargetException, IllegalAccessException {
// Add attributes to the opentelemetry span
Span currentSpan = Span.current();
currentSpan.setAttribute("hapifhir.interceptor.pointcut_name", myPointcut.name());
currentSpan.setAttribute(
"hapifhir.interceptor.class_name",
myMethod.getDeclaringClass().getName());
currentSpan.setAttribute("hapifhir.interceptor.method_name", myMethod.getName());
return myMethod.invoke(getInterceptor(), args);
}
} }
protected class HookDescriptor { protected class HookDescriptor {

View File

@ -609,12 +609,14 @@ public abstract class BaseParser implements IParser {
private boolean isStripVersionsFromReferences( private boolean isStripVersionsFromReferences(
CompositeChildElement theCompositeChildElement, IBaseResource theResource) { CompositeChildElement theCompositeChildElement, IBaseResource theResource) {
Set<String> autoVersionReferencesAtPathExtensions = if (theResource != null) {
MetaUtil.getAutoVersionReferencesAtPath(theResource.getMeta(), myContext.getResourceType(theResource)); Set<String> autoVersionReferencesAtPathExtensions = MetaUtil.getAutoVersionReferencesAtPath(
theResource.getMeta(), myContext.getResourceType(theResource));
if (!autoVersionReferencesAtPathExtensions.isEmpty() if (!autoVersionReferencesAtPathExtensions.isEmpty()
&& theCompositeChildElement.anyPathMatches(autoVersionReferencesAtPathExtensions)) { && theCompositeChildElement.anyPathMatches(autoVersionReferencesAtPathExtensions)) {
return false; return false;
}
} }
Boolean stripVersionsFromReferences = myStripVersionsFromReferences; Boolean stripVersionsFromReferences = myStripVersionsFromReferences;
@ -622,21 +624,20 @@ public abstract class BaseParser implements IParser {
return stripVersionsFromReferences; return stripVersionsFromReferences;
} }
if (myContext.getParserOptions().isStripVersionsFromReferences() == false) { if (!myContext.getParserOptions().isStripVersionsFromReferences()) {
return false; return false;
} }
Set<String> dontStripVersionsFromReferencesAtPaths = myDontStripVersionsFromReferencesAtPaths; Set<String> dontStripVersionsFromReferencesAtPaths = myDontStripVersionsFromReferencesAtPaths;
if (dontStripVersionsFromReferencesAtPaths != null) { if (dontStripVersionsFromReferencesAtPaths != null
if (dontStripVersionsFromReferencesAtPaths.isEmpty() == false && !dontStripVersionsFromReferencesAtPaths.isEmpty()
&& theCompositeChildElement.anyPathMatches(dontStripVersionsFromReferencesAtPaths)) { && theCompositeChildElement.anyPathMatches(dontStripVersionsFromReferencesAtPaths)) {
return false; return false;
}
} }
dontStripVersionsFromReferencesAtPaths = dontStripVersionsFromReferencesAtPaths =
myContext.getParserOptions().getDontStripVersionsFromReferencesAtPaths(); myContext.getParserOptions().getDontStripVersionsFromReferencesAtPaths();
return dontStripVersionsFromReferencesAtPaths.isEmpty() != false return dontStripVersionsFromReferencesAtPaths.isEmpty()
|| !theCompositeChildElement.anyPathMatches(dontStripVersionsFromReferencesAtPaths); || !theCompositeChildElement.anyPathMatches(dontStripVersionsFromReferencesAtPaths);
} }

View File

@ -112,6 +112,7 @@ public enum VersionEnum {
V6_2_5, V6_2_5,
// Dev Build // Dev Build
V6_3_0, V6_3_0,
V6_4_0, V6_4_0,
V6_4_1, V6_4_1,
V6_4_2, V6_4_2,
@ -119,10 +120,13 @@ public enum VersionEnum {
V6_4_4, V6_4_4,
V6_4_5, V6_4_5,
V6_4_6, V6_4_6,
V6_5_0, V6_5_0,
V6_6_0, V6_6_0,
V6_6_1, V6_6_1,
V6_6_2, V6_6_2,
V6_7_0, V6_7_0,
V6_8_0, V6_8_0,
V6_8_1, V6_8_1,
@ -130,21 +134,36 @@ public enum VersionEnum {
V6_8_3, V6_8_3,
V6_8_4, V6_8_4,
V6_8_5, V6_8_5,
V6_8_6,
V6_8_7,
V6_9_0, V6_9_0,
V6_10_0, V6_10_0,
V6_10_1, V6_10_1,
V6_10_2, V6_10_2,
V6_10_3, V6_10_3,
V6_10_4,
V6_10_5,
V6_11_0, V6_11_0,
V7_0_0, V7_0_0,
V7_0_1, V7_0_1,
V7_0_2,
V7_1_0, V7_1_0,
V7_2_0; V7_2_0,
V7_3_0,
V7_4_0;
public static VersionEnum latestVersion() { public static VersionEnum latestVersion() {
VersionEnum[] values = VersionEnum.values(); VersionEnum[] values = VersionEnum.values();
return values[values.length - 1]; return values[values.length - 1];
} }
public boolean isNewerThan(VersionEnum theVersionEnum) {
return ordinal() > theVersionEnum.ordinal();
}
} }
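A quick usage sketch of the new helper; comparison is by declaration order, so later-declared versions are newer:

boolean newer = VersionEnum.V7_2_0.isNewerThan(VersionEnum.V7_0_0);   // true
boolean older = VersionEnum.V6_10_0.isNewerThan(VersionEnum.V7_0_0);  // false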

View File

@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId> <artifactId>hapi-fhir-bom</artifactId>
<version>7.1.7-SNAPSHOT</version> <version>7.3.0-SNAPSHOT</version>
<packaging>pom</packaging> <packaging>pom</packaging>
<name>HAPI FHIR BOM</name> <name>HAPI FHIR BOM</name>
@ -12,7 +12,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>7.1.7-SNAPSHOT</version> <version>7.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId> <artifactId>hapi-fhir</artifactId>
<version>7.1.7-SNAPSHOT</version> <version>7.3.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>

View File

@ -4,7 +4,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>7.1.7-SNAPSHOT</version> <version>7.3.0-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -218,6 +218,8 @@ public class HapiFlywayMigrateDatabaseCommandTest {
"-p", "SA" "-p", "SA"
}; };
assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BINARY_STORAGE_BLOB"));
assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BINARY_STORAGE"));
assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RESOURCE")); assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RESOURCE"));
assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BLK_EXPORT_JOB")); assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BLK_EXPORT_JOB"));
App.main(args); App.main(args);

View File

@ -60,6 +60,7 @@ create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varch
create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(50) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID)); create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(50) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID));
create table TRM_CONCEPT_PC_LINK (PID bigint not null, CHILD_PID bigint, PARENT_PID bigint, REL_TYPE integer, CODESYSTEM_PID bigint not null, primary key (PID)); create table TRM_CONCEPT_PC_LINK (PID bigint not null, CHILD_PID bigint, PARENT_PID bigint, REL_TYPE integer, CODESYSTEM_PID bigint not null, primary key (PID));
create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CONCEPT_PID bigint, primary key (PID)); create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CONCEPT_PID bigint, primary key (PID));
create table HFJ_BINARY_STORAGE_BLOB ( BLOB_ID varchar(200) not null, BLOB_DATA blob not null, CONTENT_TYPE varchar(100) not null, BLOB_HASH varchar(128), PUBLISHED_DATE timestamp(6) not null, RESOURCE_ID varchar(100) not null, BLOB_SIZE bigint, primary key (BLOB_ID) );
create index IDX_FORCEDID_TYPE_FORCEDID on HFJ_FORCED_ID (RESOURCE_TYPE, FORCED_ID); create index IDX_FORCEDID_TYPE_FORCEDID on HFJ_FORCED_ID (RESOURCE_TYPE, FORCED_ID);
create unique index IDX_FORCEDID_RESID on HFJ_FORCED_ID (RESOURCE_PID); create unique index IDX_FORCEDID_RESID on HFJ_FORCED_ID (RESOURCE_PID);
create unique index IDX_FORCEDID_TYPE_RESID on HFJ_FORCED_ID (RESOURCE_TYPE, RESOURCE_PID); create unique index IDX_FORCEDID_TYPE_RESID on HFJ_FORCED_ID (RESOURCE_TYPE, RESOURCE_PID);

View File

@ -6,7 +6,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId> <artifactId>hapi-fhir-cli</artifactId>
<version>7.1.7-SNAPSHOT</version> <version>7.3.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId> <artifactId>hapi-fhir</artifactId>
<version>7.1.7-SNAPSHOT</version> <version>7.3.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>

View File

@ -4,7 +4,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>7.1.7-SNAPSHOT</version> <version>7.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -4,7 +4,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>7.1.7-SNAPSHOT</version> <version>7.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>7.1.7-SNAPSHOT</version> <version>7.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId> <artifactId>hapi-fhir</artifactId>
<version>7.1.7-SNAPSHOT</version> <version>7.3.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>7.1.7-SNAPSHOT</version> <version>7.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -0,0 +1,3 @@
---
release-date: "2024-01-17"
codename: "Zed"

View File

@ -0,0 +1,3 @@
---
release-date: "2024-01-31"
codename: "Zed"

View File

@ -0,0 +1,2 @@
The known issue with Bulk Export in HAPI 6.4.0 has been resolved. Bulk Export is now
more performant at large scale and no longer generates occasional incomplete file reports.

View File

@ -0,0 +1,3 @@
---
release-date: "2023-02-27"
codename: "Wizard"

View File

@ -0,0 +1,4 @@
---
type: fix
issue: 4636
title: "Fixed Oracle syntax problem with the fix that was previously provided via Pull Request 4630."

View File

@ -0,0 +1,7 @@
---
type: fix
issue: 4204
jira: SMILE-4688
backport: 6.4.2
title: "With default configuration, Resource meta.tag properties: `userSelected` and `version`, were not stored in the database.
This is now fixed."

View File

@ -1,4 +1,5 @@
--- ---
type: fix type: fix
issue: 4548 issue: 4548
backport: 6.4.2
title: "Simultaneous DELETE and $reindex operations could corrupt the search index. This has been fixed." title: "Simultaneous DELETE and $reindex operations could corrupt the search index. This has been fixed."

View File

@ -2,5 +2,6 @@
type: fix type: fix
issue: 5008 issue: 5008
jira: SMILE-6727 jira: SMILE-6727
backport: 6.4.6
title: "Previously, the GenericClient would only support the HTTP GET method for paging requests. title: "Previously, the GenericClient would only support the HTTP GET method for paging requests.
This has been corrected by adding the possibility for paging with an HTTP POST." This has been corrected by adding the possibility for paging with an HTTP POST."

View File

@ -0,0 +1 @@
Fixed migration-related issues.

View File

@ -0,0 +1,3 @@
---
release-date: "2023-12-06"
codename: "Yucatán"

View File

@ -0,0 +1,6 @@
This release attempts to remove as much usage of the LOB table as possible on PostgreSQL. Specifically, in the JPA server, several database columns related to Batch2 jobs and searching
have been reworked so that they will no longer use LOB datatypes going forward. This
is a significant advantage on PostgreSQL databases, as it removes heavy use of
the inefficient `pg_largeobject` table, and should yield performance boosts for
MSSQL as well.

View File

@ -0,0 +1,3 @@
---
release-date: "2024-03-01"
codename: "Yucatán"

View File

@ -2,6 +2,6 @@
type: fix type: fix
issue: 5486 issue: 5486
jira: SMILE-7457 jira: SMILE-7457
backport: 6.10.1 backport: 6.8.6,6.10.1
title: "Previously, testing database migration with cli migrate-database command in dry-run mode would insert in the title: "Previously, testing database migration with cli migrate-database command in dry-run mode would insert in the
migration task table. The issue has been fixed." migration task table. The issue has been fixed."

View File

@ -1,7 +1,7 @@
--- ---
type: fix type: fix
issue: 5511 issue: 5511
backport: 6.10.1 backport: 6.8.6,6.10.1
title: "Previously, when creating an index as a part of a migration, if the index already existed with a different name title: "Previously, when creating an index as a part of a migration, if the index already existed with a different name
on Oracle, the migration would fail. This has been fixed so that the create index migration task now recovers with on Oracle, the migration would fail. This has been fixed so that the create index migration task now recovers with
a warning message if the index already exists with a different name." a warning message if the index already exists with a different name."

View File

@ -1,5 +1,6 @@
--- ---
type: fix type: fix
issue: 5563 issue: 5563
backport: 6.10.3
title: "Previously, certain mdm configuration could lead to duplicate eid identifier title: "Previously, certain mdm configuration could lead to duplicate eid identifier
entries in golden resources. This has been corrected" entries in golden resources. This has been corrected"

View File

@ -1,6 +1,7 @@
--- ---
type: fix type: fix
issue: 5589 issue: 5589
backport: 6.10.5
title: "When encoding a Bundle, if resources in bundle entries had a value in title: "When encoding a Bundle, if resources in bundle entries had a value in
`Bundle.entry.fullUrl` but no value in `Bundle.entry.resource.id`, the parser `Bundle.entry.fullUrl` but no value in `Bundle.entry.resource.id`, the parser
sometimes incorrectly moved these resources to be contained within other sometimes incorrectly moved these resources to be contained within other

View File

@ -1,6 +1,8 @@
--- ---
type: fix type: fix
issue: 5589 issue: 5589
backport: 6.10.5
title: "When encoding resources using the RDF parser, placeholder IDs (i.e. resource IDs title: "When encoding resources using the RDF parser, placeholder IDs (i.e. resource IDs
starting with `urn:`) were not omitted as they are in the XML and JSON parsers. This has starting with `urn:`) were not omitted as they are in the XML and JSON parsers. This has
been corrected." been corrected."

View File

@ -1,4 +1,5 @@
--- ---
type: fix type: fix
issue: 5621 issue: 5621
backport: 6.10.4
title: "Fixed a deadlock in resource conditional create." title: "Fixed a deadlock in resource conditional create."

View File

@ -1,5 +1,6 @@
--- ---
type: fix type: fix
issue: 5623 issue: 5623
backport: 6.10.5
title: "Previously, searches that used more than one chained `Bundle` `SearchParameter` (i.e. `Composition`) were only title: "Previously, searches that used more than one chained `Bundle` `SearchParameter` (i.e. `Composition`) were only
adding one condition to the underlying SQL query which resulted in incorrect search results. This has been fixed." adding one condition to the underlying SQL query which resulted in incorrect search results. This has been fixed."

View File

@ -0,0 +1,3 @@
---
release-date: "2024-03-01"
codename: "Zed"

View File

@ -1,4 +1,5 @@
--- ---
type: fix type: fix
issue: 5671 issue: 5671
backport: 7.0.1
title: "Avoid lock contention by refreshing SearchParameter cache in a new transaction." title: "Avoid lock contention by refreshing SearchParameter cache in a new transaction."

View File

@ -1,4 +1,5 @@
--- ---
type: security type: security
issue: 5717 issue: 5717
backport: 7.0.1
title: "Fixed a potential XSS vulnerability in the HAPI FHIR Testpage Overlay module." title: "Fixed a potential XSS vulnerability in the HAPI FHIR Testpage Overlay module."

View File

@ -1,6 +1,7 @@
--- ---
type: fix type: fix
issue: 5722 issue: 5722
backport: 7.0.1
title: "An incorrect migration script caused a failure when upgrading to HAPI FHIR 7.0.0 on title: "An incorrect migration script caused a failure when upgrading to HAPI FHIR 7.0.0 on
PostgreSQL if the database was not in the `public` schema. Thanks to GitHub PostgreSQL if the database was not in the `public` schema. Thanks to GitHub
user @pano-smals for the contribution!" user @pano-smals for the contribution!"

View File

@ -0,0 +1,8 @@
---
type: fix
issue: 5725
jira: SMILE-8343
title: "The recommended constructor was not present on hibernate dialects
provided by HAPI FHIR, leading to warnings during startup, and failures
in some cases. This has been corrected. Thanks to GitHub user
@pano-smals for the contribution!"

View File

@ -1,4 +1,5 @@
--- ---
type: fix type: fix
issue: 5742 issue: 5742
title: Fixed behaviour of the _language query parameter. Now it is picked up as search parameter in the resource provider and filters accordingly. Thanks to Jens Villadsen (@jkiddo) for the contribution! backport: 7.0.1
title: "Fixed behaviour of the _language query parameter. Now it is picked up as search parameter in the resource provider and filters accordingly. Thanks to Jens Villadsen (@jkiddo) for the contribution!"

View File

@ -1,6 +1,7 @@
--- ---
type: perf type: perf
issue: 5748 issue: 5748
backport: 6.8.7
title: "In the JPA server, several database columns related to Batch2 jobs and searching title: "In the JPA server, several database columns related to Batch2 jobs and searching
have been reworked so that they will no longer use LOB datatypes going forward. This have been reworked so that they will no longer use LOB datatypes going forward. This
is a significant advantage on PostgreSQL databases as it removes a significant use is a significant advantage on PostgreSQL databases as it removes a significant use

View File

@ -0,0 +1,4 @@
---
type: fix
issue: 5812
title: "Previously, the 'planDefinition' resource parameter for the Dstu3 version of PlanDefinition/$apply was incorrectly defined as an R4 resource. This issue has been fixed."

View File

@ -0,0 +1,5 @@
---
type: change
issue: 5814
title: "Extracted methods out of ResourceProviderFactory into ObservableSupplierSet so that functionality can be used by
other services. Unit tests revealed a cleanup bug in MdmProviderLoader that is fixed in this MR."

View File

@ -0,0 +1,7 @@
---
type: fix
issue: 5820
title: "Unifying the code paths for Patient type export and Patient instance export.
These paths should be the same, since type is defined by spec, but instance
is just 'syntactic sugar' on top of that spec (and so should be the same).
"

View File

@ -0,0 +1,8 @@
---
type: fix
issue: 5828
title: "When batch 2 jobs with Reduction steps fail in the final part
of the reduction step, this would often leave the job
stuck in the FINALIZE state.
This has been fixed; the job will now FAIL.
"

View File

@ -0,0 +1,5 @@
---
type: perf
issue: 5838
title: "Migration of remaining database columns still using the LOB datatypes. This change effectively cuts all ties
with the inefficient `pg_largeobject` table."

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 5842
title: "Fixed a bug where an NPE was being thrown when trying to serialize a FHIR fragment (e.g., backboneElement
, compound datatype) to a string representation if the fragment contains a `Reference`."

View File

@ -0,0 +1,11 @@
---
type: add
issue: 5845
title: "Added a new pointcut: STORAGE_PRE_INITIATE_BULK_EXPORT.
This pointcut is meant to be called explicitly before
STORAGE_INITIATE_BULK_EXPORT, so that parameter
manipulation that needs to be done before permission
checks (currently done using STORAGE_INITIATE_BULK_EXPORT)
can safely be done without risk of affecting permission checks/
authorization.
"

View File

@ -0,0 +1,6 @@
---
type: add
issue: 5850
title: "Add additional Clinical Reasoning operations $collect-data and $data-requirements to HAPI-FHIR to expand capability
for clinical reasoning module users. These operations will assist users in identifying data requirements for evaluating a measure, and what
data was used for a measure evaluation"

View File

@ -0,0 +1,6 @@
---
type: add
issue: 5855
title: "If using an OpenTelemetry agent, a span is now generated for each interceptor method call. The span is named
as 'hapifhir.interceptor' and it has the following attributes about the interceptor:
'hapifhir.interceptor.pointcut_name','hapifhir.interceptor.class_name', 'hapifhir.interceptor.method_name'"

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 5856
title: "Previously, it was possible to execute the `$delete-expunge` operation on a resource even if the user did not
have delete permissions for the given resource type. This has been fixed."

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 5874
jira: SMILE-8149
title: "Fixed a bug where 'List' would be incorrectly shown as 'ListResource' in the error response for a GET for an invalid resource."

View File

@ -0,0 +1,4 @@
---
type: fix
issue: 5867
title: "Previously, when adding multiple resources and defining a golden resource using MDM, the golden resource's tags were removed. This has been fixed"

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 5877
title: "Previously, updating a tokenParam with a value greater than 200 characters would raise a SQLException.
This issue has been fixed."

View File

@ -0,0 +1,3 @@
---
release-date: "2024-08-18"
codename: "Copernicus"

View File

@ -11,7 +11,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>7.1.7-SNAPSHOT</version> <version>7.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -4,7 +4,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>7.1.7-SNAPSHOT</version> <version>7.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>7.1.7-SNAPSHOT</version> <version>7.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -20,7 +20,7 @@
package ca.uhn.fhir.jpa.model.dialect; package ca.uhn.fhir.jpa.model.dialect;
import org.hibernate.dialect.CockroachDialect; import org.hibernate.dialect.CockroachDialect;
import org.hibernate.dialect.DatabaseVersion; import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
/** /**
* Dialect for CockroachDB database. * Dialect for CockroachDB database.
@ -29,7 +29,11 @@ import org.hibernate.dialect.DatabaseVersion;
public class HapiFhirCockroachDialect extends CockroachDialect { public class HapiFhirCockroachDialect extends CockroachDialect {
public HapiFhirCockroachDialect() { public HapiFhirCockroachDialect() {
super(DatabaseVersion.make(21, 1)); super();
}
public HapiFhirCockroachDialect(DialectResolutionInfo info) {
super(info);
} }
/** /**

View File

@ -19,8 +19,8 @@
*/ */
package ca.uhn.fhir.jpa.model.dialect; package ca.uhn.fhir.jpa.model.dialect;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.DerbyDialect; import org.hibernate.dialect.DerbyDialect;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
/** /**
* Dialect for Derby database. * Dialect for Derby database.
@ -28,12 +28,12 @@ import org.hibernate.dialect.DerbyDialect;
*/ */
public class HapiFhirDerbyDialect extends DerbyDialect { public class HapiFhirDerbyDialect extends DerbyDialect {
public HapiFhirDerbyDialect(DatabaseVersion theDatabaseVersion) { public HapiFhirDerbyDialect() {
super(theDatabaseVersion); super();
} }
public HapiFhirDerbyDialect() { public HapiFhirDerbyDialect(DialectResolutionInfo info) {
super(DatabaseVersion.make(10, 14, 2)); super(info);
} }
/** /**

View File

@ -19,8 +19,8 @@
*/ */
package ca.uhn.fhir.jpa.model.dialect; package ca.uhn.fhir.jpa.model.dialect;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.H2Dialect; import org.hibernate.dialect.H2Dialect;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
/** /**
* Dialect for H2 database. * Dialect for H2 database.
@ -28,12 +28,12 @@ import org.hibernate.dialect.H2Dialect;
*/ */
public class HapiFhirH2Dialect extends H2Dialect { public class HapiFhirH2Dialect extends H2Dialect {
public HapiFhirH2Dialect(DatabaseVersion theDatabaseVersion) { public HapiFhirH2Dialect() {
super(theDatabaseVersion); super();
} }
public HapiFhirH2Dialect() { public HapiFhirH2Dialect(DialectResolutionInfo info) {
super(DatabaseVersion.make(2, 2, 220)); super(info);
} }
/** /**

View File

@ -19,8 +19,8 @@
*/ */
package ca.uhn.fhir.jpa.model.dialect; package ca.uhn.fhir.jpa.model.dialect;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.MariaDBDialect; import org.hibernate.dialect.MariaDBDialect;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
/** /**
* Dialect for MySQL database. * Dialect for MySQL database.
@ -28,12 +28,12 @@ import org.hibernate.dialect.MariaDBDialect;
*/ */
public class HapiFhirMariaDBDialect extends MariaDBDialect { public class HapiFhirMariaDBDialect extends MariaDBDialect {
public HapiFhirMariaDBDialect(DatabaseVersion theDatabaseVersion) { public HapiFhirMariaDBDialect() {
super(theDatabaseVersion); super();
} }
public HapiFhirMariaDBDialect() { public HapiFhirMariaDBDialect(DialectResolutionInfo info) {
super(DatabaseVersion.make(10, 11, 5)); super(info);
} }
/** /**

View File

@ -19,8 +19,8 @@
*/ */
package ca.uhn.fhir.jpa.model.dialect; package ca.uhn.fhir.jpa.model.dialect;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.MySQLDialect; import org.hibernate.dialect.MySQLDialect;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
/** /**
* Dialect for MySQL database. * Dialect for MySQL database.
@ -28,12 +28,12 @@ import org.hibernate.dialect.MySQLDialect;
*/ */
public class HapiFhirMySQLDialect extends MySQLDialect { public class HapiFhirMySQLDialect extends MySQLDialect {
public HapiFhirMySQLDialect(DatabaseVersion theDatabaseVersion) { public HapiFhirMySQLDialect() {
super(theDatabaseVersion); super();
} }
public HapiFhirMySQLDialect() { public HapiFhirMySQLDialect(DialectResolutionInfo info) {
super(DatabaseVersion.make(5, 7)); super(info);
} }
/** /**

View File

@ -19,8 +19,8 @@
*/ */
package ca.uhn.fhir.jpa.model.dialect; package ca.uhn.fhir.jpa.model.dialect;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.OracleDialect; import org.hibernate.dialect.OracleDialect;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
/** /**
* Dialect for Oracle database. * Dialect for Oracle database.
@ -28,12 +28,12 @@ import org.hibernate.dialect.OracleDialect;
*/ */
public class HapiFhirOracleDialect extends OracleDialect { public class HapiFhirOracleDialect extends OracleDialect {
public HapiFhirOracleDialect(DatabaseVersion theDatabaseVersion) { public HapiFhirOracleDialect() {
super(theDatabaseVersion); super();
} }
public HapiFhirOracleDialect() { public HapiFhirOracleDialect(DialectResolutionInfo info) {
super(DatabaseVersion.make(12, 2)); super(info);
} }
/** /**

View File

@ -19,17 +19,17 @@
*/ */
package ca.uhn.fhir.jpa.model.dialect; package ca.uhn.fhir.jpa.model.dialect;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.PostgreSQLDialect; import org.hibernate.dialect.PostgreSQLDialect;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
public class HapiFhirPostgresDialect extends PostgreSQLDialect { public class HapiFhirPostgresDialect extends PostgreSQLDialect {
public HapiFhirPostgresDialect(DatabaseVersion theDatabaseVersion) { public HapiFhirPostgresDialect() {
super(theDatabaseVersion); super();
} }
public HapiFhirPostgresDialect() { public HapiFhirPostgresDialect(DialectResolutionInfo info) {
super(DatabaseVersion.make(10, 0, 0)); super(info);
} }
/** /**

View File

@ -19,8 +19,8 @@
*/ */
package ca.uhn.fhir.jpa.model.dialect; package ca.uhn.fhir.jpa.model.dialect;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.SQLServerDialect; import org.hibernate.dialect.SQLServerDialect;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
/** /**
* Dialect for MS SQL Server database. * Dialect for MS SQL Server database.
@ -28,12 +28,12 @@ import org.hibernate.dialect.SQLServerDialect;
*/ */
public class HapiFhirSQLServerDialect extends SQLServerDialect { public class HapiFhirSQLServerDialect extends SQLServerDialect {
public HapiFhirSQLServerDialect(DatabaseVersion theDatabaseVersion) { public HapiFhirSQLServerDialect() {
super(theDatabaseVersion); super();
} }
public HapiFhirSQLServerDialect() { public HapiFhirSQLServerDialect(DialectResolutionInfo info) {
super(DatabaseVersion.make(11)); super(info);
} }
/** /**
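The same pattern is applied to every dialect above: the hard-coded DatabaseVersion constructors are replaced with a no-arg constructor plus the DialectResolutionInfo constructor that Hibernate 6 uses when resolving the dialect from JDBC metadata at startup (its absence caused the startup warnings described in the changelog). A condensed sketch of the shape, with a hypothetical class name:

import org.hibernate.dialect.PostgreSQLDialect;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;

public class MyCustomDialect extends PostgreSQLDialect {
	// Used when the dialect class is named explicitly in configuration.
	public MyCustomDialect() {
		super();
	}

	// Used when Hibernate resolves the dialect from JDBC metadata.
	public MyCustomDialect(DialectResolutionInfo info) {
		super(info);
	}
}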

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>7.1.7-SNAPSHOT</version> <version>7.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -41,6 +41,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import java.io.ByteArrayInputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.io.OutputStream; import java.io.OutputStream;
@ -50,7 +51,7 @@ import java.util.Date;
import java.util.Optional; import java.util.Optional;
@Transactional @Transactional
public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { public class DatabaseBinaryContentStorageSvcImpl extends BaseBinaryStorageSvcImpl {
@PersistenceContext(type = PersistenceContextType.TRANSACTION) @PersistenceContext(type = PersistenceContextType.TRANSACTION)
private EntityManager myEntityManager; private EntityManager myEntityManager;
@ -61,9 +62,9 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
@Nonnull @Nonnull
@Override @Override
@Transactional(propagation = Propagation.REQUIRED) @Transactional(propagation = Propagation.REQUIRED)
public StoredDetails storeBlob( public StoredDetails storeBinaryContent(
IIdType theResourceId, IIdType theResourceId,
String theBlobIdOrNull, String theBinaryContentIdOrNull,
String theContentType, String theContentType,
InputStream theInputStream, InputStream theInputStream,
RequestDetails theRequestDetails) RequestDetails theRequestDetails)
@ -82,14 +83,20 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
BinaryStorageEntity entity = new BinaryStorageEntity(); BinaryStorageEntity entity = new BinaryStorageEntity();
entity.setResourceId(theResourceId.toUnqualifiedVersionless().getValue()); entity.setResourceId(theResourceId.toUnqualifiedVersionless().getValue());
entity.setBlobContentType(theContentType); entity.setContentType(theContentType);
entity.setPublished(publishedDate); entity.setPublished(publishedDate);
Session session = (Session) myEntityManager.getDelegate(); Session session = (Session) myEntityManager.getDelegate();
LobHelper lobHelper = session.getLobHelper(); LobHelper lobHelper = session.getLobHelper();
byte[] loadedStream = IOUtils.toByteArray(countingInputStream); byte[] loadedStream = IOUtils.toByteArray(countingInputStream);
String id = super.provideIdForNewBlob(theBlobIdOrNull, loadedStream, theRequestDetails, theContentType); String id = super.provideIdForNewBinaryContent(
entity.setBlobId(id); theBinaryContentIdOrNull, loadedStream, theRequestDetails, theContentType);
entity.setContentId(id);
entity.setStorageContentBin(loadedStream);
// TODO: remove writing Blob in a future release
Blob dataBlob = lobHelper.createBlob(loadedStream); Blob dataBlob = lobHelper.createBlob(loadedStream);
entity.setBlob(dataBlob); entity.setBlob(dataBlob);
@ -103,7 +110,7 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
myEntityManager.persist(entity); myEntityManager.persist(entity);
return new StoredDetails() return new StoredDetails()
.setBlobId(id) .setBinaryContentId(id)
.setBytes(bytes) .setBytes(bytes)
.setPublished(publishedDate) .setPublished(publishedDate)
.setHash(hash) .setHash(hash)
@ -111,68 +118,98 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
} }
@Override @Override
public StoredDetails fetchBlobDetails(IIdType theResourceId, String theBlobId) { public StoredDetails fetchBinaryContentDetails(IIdType theResourceId, String theBinaryContentId) {
Optional<BinaryStorageEntity> entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId( Optional<BinaryStorageEntity> entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId(
theBlobId, theResourceId.toUnqualifiedVersionless().getValue()); theBinaryContentId, theResourceId.toUnqualifiedVersionless().getValue());
if (entityOpt.isEmpty()) { if (entityOpt.isEmpty()) {
return null; return null;
} }
BinaryStorageEntity entity = entityOpt.get(); BinaryStorageEntity entity = entityOpt.get();
return new StoredDetails() return new StoredDetails()
.setBlobId(theBlobId) .setBinaryContentId(theBinaryContentId)
.setContentType(entity.getBlobContentType()) .setContentType(entity.getContentType())
.setHash(entity.getHash()) .setHash(entity.getHash())
.setPublished(entity.getPublished()) .setPublished(entity.getPublished())
.setBytes(entity.getSize()); .setBytes(entity.getSize());
} }
@Override @Override
public boolean writeBlob(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) throws IOException { public boolean writeBinaryContent(IIdType theResourceId, String theBinaryContentId, OutputStream theOutputStream)
throws IOException {
Optional<BinaryStorageEntity> entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId( Optional<BinaryStorageEntity> entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId(
theBlobId, theResourceId.toUnqualifiedVersionless().getValue()); theBinaryContentId, theResourceId.toUnqualifiedVersionless().getValue());
if (entityOpt.isEmpty()) { if (entityOpt.isEmpty()) {
return false; return false;
} }
copyBlobToOutputStream(theOutputStream, entityOpt.get()); copyBinaryContentToOutputStream(theOutputStream, entityOpt.get());
return true; return true;
} }
@Override @Override
public void expungeBlob(IIdType theResourceId, String theBlobId) { public void expungeBinaryContent(IIdType theResourceId, String theBinaryContentId) {
Optional<BinaryStorageEntity> entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId( Optional<BinaryStorageEntity> entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId(
theBlobId, theResourceId.toUnqualifiedVersionless().getValue()); theBinaryContentId, theResourceId.toUnqualifiedVersionless().getValue());
entityOpt.ifPresent( entityOpt.ifPresent(
theBinaryStorageEntity -> myBinaryStorageEntityDao.deleteByPid(theBinaryStorageEntity.getBlobId())); theBinaryStorageEntity -> myBinaryStorageEntityDao.deleteByPid(theBinaryStorageEntity.getContentId()));
} }
@Override @Override
public byte[] fetchBlob(IIdType theResourceId, String theBlobId) throws IOException { public byte[] fetchBinaryContent(IIdType theResourceId, String theBinaryContentId) throws IOException {
BinaryStorageEntity entityOpt = myBinaryStorageEntityDao BinaryStorageEntity entityOpt = myBinaryStorageEntityDao
.findByIdAndResourceId( .findByIdAndResourceId(
theBlobId, theResourceId.toUnqualifiedVersionless().getValue()) theBinaryContentId,
theResourceId.toUnqualifiedVersionless().getValue())
.orElseThrow(() -> new ResourceNotFoundException( .orElseThrow(() -> new ResourceNotFoundException(
"Unknown blob ID: " + theBlobId + " for resource ID " + theResourceId)); "Unknown BinaryContent ID: " + theBinaryContentId + " for resource ID " + theResourceId));
return copyBlobToByteArray(entityOpt); return copyBinaryContentToByteArray(entityOpt);
} }
void copyBlobToOutputStream(OutputStream theOutputStream, BinaryStorageEntity theEntity) throws IOException { void copyBinaryContentToOutputStream(OutputStream theOutputStream, BinaryStorageEntity theEntity)
try (InputStream inputStream = theEntity.getBlob().getBinaryStream()) { throws IOException {
try (InputStream inputStream = getBinaryContent(theEntity)) {
IOUtils.copy(inputStream, theOutputStream); IOUtils.copy(inputStream, theOutputStream);
} catch (SQLException e) { } catch (SQLException e) {
throw new IOException(Msg.code(1341) + e); throw new IOException(Msg.code(1341) + e);
} }
} }
byte[] copyBlobToByteArray(BinaryStorageEntity theEntity) throws IOException { byte[] copyBinaryContentToByteArray(BinaryStorageEntity theEntity) throws IOException {
try { byte[] retVal;
return ByteStreams.toByteArray(theEntity.getBlob().getBinaryStream());
try (InputStream inputStream = getBinaryContent(theEntity)) {
retVal = ByteStreams.toByteArray(inputStream);
} catch (SQLException e) { } catch (SQLException e) {
throw new IOException(Msg.code(1342) + e); throw new IOException(Msg.code(1342) + e);
} }
return retVal;
}
/**
* The caller is responsible for closing the returned stream.
*
* @param theEntity the binary storage entity to read content from
* @return a stream over the stored content; never null, empty if no content is present
* @throws SQLException if the legacy Blob's binary stream cannot be opened
*/
private InputStream getBinaryContent(BinaryStorageEntity theEntity) throws SQLException {
InputStream retVal;
if (theEntity.hasStorageContent()) {
retVal = new ByteArrayInputStream(theEntity.getStorageContentBin());
} else if (theEntity.hasBlob()) {
retVal = theEntity.getBlob().getBinaryStream();
} else {
retVal = new ByteArrayInputStream(new byte[0]);
}
return retVal;
} }
} }
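A usage sketch of the renamed API, with method names taken from the class above (the StoredDetails getter is assumed to mirror the setBinaryContentId setter shown earlier; the resource ID and payload are illustrative):

// Store: hashes the bytes and persists both the new binary column
// and, for now, the legacy Blob (per the TODO above).
StoredDetails details = svc.storeBinaryContent(
		new IdType("Binary/123"), null, "application/octet-stream",
		new ByteArrayInputStream(payload), theRequestDetails);

// Fetch: reads the new column when present, falling back to the legacy Blob.
byte[] roundTripped = svc.fetchBinaryContent(new IdType("Binary/123"), details.getBinaryContentId());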

View File

@ -208,8 +208,7 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
if (idChunk.size() >= 2) { if (idChunk.size() >= 2) {
List<ResourceTable> entityChunk = prefetchResourceTableHistoryAndProvenance(idChunk); List<ResourceTable> entityChunk = prefetchResourceTableHistoryAndProvenance(idChunk);
// fixme mb if (thePreFetchIndexes) {
if (thePreFetchIndexes && false) {
prefetchByField("string", "myParamsString", ResourceTable::isParamsStringPopulated, entityChunk); prefetchByField("string", "myParamsString", ResourceTable::isParamsStringPopulated, entityChunk);
prefetchByField("token", "myParamsToken", ResourceTable::isParamsTokenPopulated, entityChunk); prefetchByField("token", "myParamsToken", ResourceTable::isParamsTokenPopulated, entityChunk);
@ -240,10 +239,8 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
Query query = myEntityManager.createQuery("select r, h " Query query = myEntityManager.createQuery("select r, h "
+ " FROM ResourceTable r " + " FROM ResourceTable r "
+ " LEFT JOIN fetch ResourceHistoryTable h " + " LEFT JOIN fetch ResourceHistoryTable h "
// fixme make null safe + " on r.myVersion = h.myResourceVersion and r.id = h.myResourceId "
+ " on r.myVersion = h.myResourceVersion and r.id = h.myResourceId and r.myPartitionIdValue = h.myPartitionIdValue " + " left join fetch h.myProvenance "
+ " left join fetch ResourceHistoryProvenanceEntity p "
+ " on p.myResourceHistoryTable = h and p.myPartitionIdValue = h.myPartitionIdValue "
+ " WHERE r.myId IN ( :IDS ) "); + " WHERE r.myId IN ( :IDS ) ");
query.setParameter("IDS", idChunk); query.setParameter("IDS", idChunk);

View File

@ -58,6 +58,7 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.util.IMetaTagSorter; import ca.uhn.fhir.util.IMetaTagSorter;
import ca.uhn.fhir.util.MetaUtil; import ca.uhn.fhir.util.MetaUtil;
import jakarta.annotation.Nullable; import jakarta.annotation.Nullable;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseCoding; import org.hl7.fhir.instance.model.api.IBaseCoding;
@ -470,7 +471,7 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
res.getMeta().setLastUpdated(theEntity.getUpdatedDate()); res.getMeta().setLastUpdated(theEntity.getUpdatedDate());
IDao.RESOURCE_PID.put(res, theEntity.getResourceId()); IDao.RESOURCE_PID.put(res, theEntity.getResourceId());
if (theTagList != null) { if (CollectionUtils.isNotEmpty(theTagList)) {
res.getMeta().getTag().clear(); res.getMeta().getTag().clear();
res.getMeta().getProfile().clear(); res.getMeta().getProfile().clear();
res.getMeta().getSecurity().clear(); res.getMeta().getSecurity().clear();

View File

@ -29,11 +29,11 @@ import java.util.Optional;
public interface IBinaryStorageEntityDao extends JpaRepository<BinaryStorageEntity, String>, IHapiFhirJpaRepository { public interface IBinaryStorageEntityDao extends JpaRepository<BinaryStorageEntity, String>, IHapiFhirJpaRepository {
@Query("SELECT e FROM BinaryStorageEntity e WHERE e.myBlobId = :blob_id AND e.myResourceId = :resource_id") @Query("SELECT e FROM BinaryStorageEntity e WHERE e.myContentId = :content_id AND e.myResourceId = :resource_id")
Optional<BinaryStorageEntity> findByIdAndResourceId( Optional<BinaryStorageEntity> findByIdAndResourceId(
@Param("blob_id") String theBlobId, @Param("resource_id") String theResourceId); @Param("content_id") String theContentId, @Param("resource_id") String theResourceId);
@Modifying @Modifying
@Query("DELETE FROM BinaryStorageEntity t WHERE t.myBlobId = :pid") @Query("DELETE FROM BinaryStorageEntity t WHERE t.myContentId = :pid")
void deleteByPid(@Param("pid") String theId); void deleteByPid(@Param("pid") String theId);
} }

View File

@ -43,12 +43,14 @@ import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table; import jakarta.persistence.Table;
import jakarta.persistence.Temporal; import jakarta.persistence.Temporal;
import jakarta.persistence.TemporalType; import jakarta.persistence.TemporalType;
+import jakarta.persistence.Transient;
 import jakarta.persistence.UniqueConstraint;
 import org.apache.commons.lang3.Validate;
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
+import org.hibernate.Length;
 import org.hibernate.search.engine.backend.types.Projectable;
 import org.hibernate.search.engine.backend.types.Searchable;
 import org.hibernate.search.mapper.pojo.bridge.mapping.annotation.PropertyBinderRef;
@@ -56,7 +58,10 @@ import org.hibernate.search.mapper.pojo.bridge.mapping.annotation.RoutingBinderRef;
 import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;
 import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField;
 import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed;
+import org.hibernate.search.mapper.pojo.mapping.definition.annotation.IndexingDependency;
+import org.hibernate.search.mapper.pojo.mapping.definition.annotation.ObjectPath;
 import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyBinding;
+import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyValue;
 import org.hl7.fhir.r4.model.Coding;
 import java.io.Serializable;
@@ -68,6 +73,8 @@ import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;
+import static java.util.Objects.isNull;
+import static java.util.Objects.nonNull;
 import static org.apache.commons.lang3.StringUtils.left;
 import static org.apache.commons.lang3.StringUtils.length;
@@ -165,15 +172,14 @@ public class TermConcept implements Serializable {
     @Column(name = "INDEX_STATUS", nullable = true)
     private Long myIndexStatus;

+    @Deprecated(since = "7.2.0")
     @Lob
     @Column(name = "PARENT_PIDS", nullable = true)
-    @FullTextField(
-            name = "myParentPids",
-            searchable = Searchable.YES,
-            projectable = Projectable.YES,
-            analyzer = "conceptParentPidsAnalyzer")
     private String myParentPids;

+    @Column(name = "PARENT_PIDS_VC", nullable = true, length = Length.LONG32)
+    private String myParentPidsVc;

     @OneToMany(
             cascade = {},
             fetch = FetchType.LAZY,
@@ -356,8 +362,15 @@ public class TermConcept implements Serializable {
         return this;
     }

+    @Transient
+    @FullTextField(
+            name = "myParentPids",
+            searchable = Searchable.YES,
+            projectable = Projectable.YES,
+            analyzer = "conceptParentPidsAnalyzer")
+    @IndexingDependency(derivedFrom = @ObjectPath({@PropertyValue(propertyName = "myParentPidsVc")}))
     public String getParentPidsAsString() {
-        return myParentPids;
+        return nonNull(myParentPidsVc) ? myParentPidsVc : myParentPids;
     }

     public List<TermConceptParentChildLink> getParents() {
@@ -437,7 +450,7 @@ public class TermConcept implements Serializable {
     @PreUpdate
     @PrePersist
     public void prePersist() {
-        if (myParentPids == null) {
+        if (isNull(myParentPids) && isNull(myParentPidsVc)) {
             Set<Long> parentPids = new HashSet<>();
             TermConcept entity = this;
             parentPids(entity, parentPids);
@@ -464,6 +477,7 @@ public class TermConcept implements Serializable {
     }

     public TermConcept setParentPids(String theParentPids) {
+        myParentPidsVc = theParentPids;
         myParentPids = theParentPids;
         return this;
     }
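The TermConcept change above is the template for the rest of this diff: writes keep the deprecated @Lob column and its VARCHAR replacement in sync, while reads prefer the new column and fall back to rows written before the migration. A minimal sketch of that shape, using hypothetical field names rather than the real entity:

import static java.util.Objects.nonNull;

// Sketch of the dual-column LOB migration pattern; the field names are
// hypothetical, and the real entities carry the JPA annotations shown above.
class DualColumnPattern {
    @Deprecated
    private String myLegacyLobValue; // old @Lob column, kept so a rollback still finds data
    private String myValueVc;        // new VARCHAR/TEXT column

    void setValue(String theValue) {
        myValueVc = theValue;        // dual write keeps both columns populated
        myLegacyLobValue = theValue;
    }

    String getValue() {
        // Prefer the new column; fall back for rows that predate the migration
        return nonNull(myValueVc) ? myValueVc : myLegacyLobValue;
    }
}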
@@ -20,6 +20,7 @@
 package ca.uhn.fhir.jpa.entity;

 import ca.uhn.fhir.util.ValidateUtil;
+import com.google.common.annotations.VisibleForTesting;
 import jakarta.annotation.Nonnull;
 import jakarta.persistence.Column;
 import jakarta.persistence.Entity;
@@ -41,6 +42,7 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
+import org.hibernate.Length;
 import org.hibernate.annotations.JdbcTypeCode;
 import org.hibernate.search.engine.backend.types.Projectable;
 import org.hibernate.search.engine.backend.types.Searchable;
@@ -68,7 +70,7 @@ import static org.apache.commons.lang3.StringUtils.length;
 public class TermConceptProperty implements Serializable {
     public static final int MAX_PROPTYPE_ENUM_LENGTH = 6;
     private static final long serialVersionUID = 1L;
-    private static final int MAX_LENGTH = 500;
+    public static final int MAX_LENGTH = 500;

     @ManyToOne(fetch = FetchType.LAZY)
     @JoinColumn(
@@ -106,10 +108,14 @@ public class TermConceptProperty implements Serializable {
     @GenericField(name = "myValueString", searchable = Searchable.YES)
     private String myValue;

+    @Deprecated(since = "7.2.0")
     @Column(name = "PROP_VAL_LOB")
     @Lob()
     private byte[] myValueLob;

+    @Column(name = "PROP_VAL_BIN", nullable = true, length = Length.LONG32)
+    private byte[] myValueBin;

     @Enumerated(EnumType.ORDINAL)
     @Column(name = "PROP_TYPE", nullable = false, length = MAX_PROPTYPE_ENUM_LENGTH)
     @JdbcTypeCode(SqlTypes.INTEGER)
@@ -196,8 +202,8 @@
      * property, and the code for a {@link TermConceptPropertyTypeEnum#CODING coding} property.
      */
     public String getValue() {
-        if (hasValueLob()) {
-            return getValueLobAsString();
+        if (hasValueBin()) {
+            return getValueBinAsString();
         }
         return myValue;
     }
@@ -208,36 +214,41 @@
      */
     public TermConceptProperty setValue(String theValue) {
         if (theValue.length() > MAX_LENGTH) {
-            setValueLob(theValue);
+            setValueBin(theValue);
         } else {
             myValueLob = null;
+            myValueBin = null;
         }
         myValue = left(theValue, MAX_LENGTH);
         return this;
     }

-    public boolean hasValueLob() {
+    public boolean hasValueBin() {
+        if (myValueBin != null && myValueBin.length > 0) {
+            return true;
+        }
         if (myValueLob != null && myValueLob.length > 0) {
             return true;
         }
         return false;
     }

-    public byte[] getValueLob() {
-        return myValueLob;
-    }
-
-    public TermConceptProperty setValueLob(byte[] theValueLob) {
-        myValueLob = theValueLob;
+    public TermConceptProperty setValueBin(byte[] theValueBin) {
+        myValueBin = theValueBin;
+        myValueLob = theValueBin;
         return this;
     }

-    public TermConceptProperty setValueLob(String theValueLob) {
-        myValueLob = theValueLob.getBytes(StandardCharsets.UTF_8);
-        return this;
+    public TermConceptProperty setValueBin(String theValueBin) {
+        return setValueBin(theValueBin.getBytes(StandardCharsets.UTF_8));
     }

-    public String getValueLobAsString() {
+    public String getValueBinAsString() {
+        if (myValueBin != null && myValueBin.length > 0) {
+            return new String(myValueBin, StandardCharsets.UTF_8);
+        }
         return new String(myValueLob, StandardCharsets.UTF_8);
     }
@@ -295,4 +306,24 @@
     public Long getPid() {
         return myId;
     }
+
+    @VisibleForTesting
+    public byte[] getValueBlobForTesting() {
+        return myValueLob;
+    }
+
+    @VisibleForTesting
+    public void setValueBlobForTesting(byte[] theValueLob) {
+        myValueLob = theValueLob;
+    }
+
+    @VisibleForTesting
+    public byte[] getValueBinForTesting() {
+        return myValueBin;
+    }
+
+    @VisibleForTesting
+    public void setValueBinForTesting(byte[] theValuebin) {
+        myValueBin = theValuebin;
+    }
 }
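With MAX_LENGTH now public, the setValue() threshold above is easy to exercise; a minimal usage sketch (separate from the tests added later in this diff), assuming assertions are enabled:

// Values over MAX_LENGTH (500) are dual-written to PROP_VAL_BIN and the legacy
// PROP_VAL_LOB, while the indexed string column keeps only a truncated prefix.
TermConceptProperty property = new TermConceptProperty();
String longValue = "a".repeat(TermConceptProperty.MAX_LENGTH + 1);

property.setValue(longValue);
assert property.hasValueBin();                // binary copy present
assert longValue.equals(property.getValue()); // getValue() returns the full binary copy

property.setValue("short");
assert !property.hasValueBin();               // both binary columns are cleared
assert "short".equals(property.getValue());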
@@ -40,11 +40,13 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
+import org.hibernate.Length;

 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;

+import static org.apache.commons.lang3.StringUtils.isNotEmpty;
 import static org.apache.commons.lang3.StringUtils.left;
 import static org.apache.commons.lang3.StringUtils.length;
@@ -98,10 +100,14 @@ public class TermValueSetConcept implements Serializable {
     @Column(name = "SOURCE_PID", nullable = true)
     private Long mySourceConceptPid;

+    @Deprecated(since = "7.2.0")
     @Lob
     @Column(name = "SOURCE_DIRECT_PARENT_PIDS", nullable = true)
     private String mySourceConceptDirectParentPids;

+    @Column(name = "SOURCE_DIRECT_PARENT_PIDS_VC", nullable = true, length = Length.LONG32)
+    private String mySourceConceptDirectParentPidsVc;

     @Column(name = "SYSTEM_URL", nullable = false, length = TermCodeSystem.MAX_URL_LENGTH)
     private String mySystem;
@@ -264,7 +270,7 @@
             .append("valueSetName", this.getValueSetName())
             .append("display", myDisplay)
             .append("designationCount", myDesignations != null ? myDesignations.size() : "(null)")
-            .append("parentPids", mySourceConceptDirectParentPids)
+            .append("parentPids", getSourceConceptDirectParentPids())
             .toString();
     }
@@ -282,5 +288,12 @@
     public void setSourceConceptDirectParentPids(String theSourceConceptDirectParentPids) {
         mySourceConceptDirectParentPids = theSourceConceptDirectParentPids;
+        mySourceConceptDirectParentPidsVc = theSourceConceptDirectParentPids;
+    }
+
+    public String getSourceConceptDirectParentPids() {
+        return isNotEmpty(mySourceConceptDirectParentPidsVc)
+                ? mySourceConceptDirectParentPidsVc
+                : mySourceConceptDirectParentPids;
     }
 }
@@ -127,7 +127,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 		Builder version = forVersion(VersionEnum.V7_2_0);

-		// allow null codes in concept map targets
+		// allow null codes in concept map targets (see comment on "20190722.27" if you are going to change this)
 		version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
 				.modifyColumn("20240327.1", "TARGET_CODE")
 				.nullable()
@@ -139,6 +139,62 @@
 		forcedId.dropIndex("20240402.2", "IDX_FORCEDID_RESID");
 		forcedId.dropIndex("20240402.3", "IDX_FORCEDID_TYPE_FID");
 		forcedId.dropIndex("20240402.4", "IDX_FORCEID_FID");
+
+		// Migration from LOB
+		{
+			Builder.BuilderWithTableName binaryStorageBlobTable = version.onTable("HFJ_BINARY_STORAGE_BLOB");
+
+			binaryStorageBlobTable
+					.renameColumn("20240404.1", "BLOB_ID", "CONTENT_ID")
+					.renameColumn("20240404.2", "BLOB_SIZE", "CONTENT_SIZE")
+					.renameColumn("20240404.3", "BLOB_HASH", "CONTENT_HASH");
+
+			binaryStorageBlobTable
+					.modifyColumn("20240404.4", "BLOB_DATA")
+					.nullable()
+					.withType(ColumnTypeEnum.BLOB);
+
+			binaryStorageBlobTable
+					.addColumn("20240404.5", "STORAGE_CONTENT_BIN")
+					.nullable()
+					.type(ColumnTypeEnum.BINARY);
+
+			binaryStorageBlobTable.migrateBlobToBinary("20240404.6", "BLOB_DATA", "STORAGE_CONTENT_BIN");
+
+			binaryStorageBlobTable.renameTable("20240404.7", "HFJ_BINARY_STORAGE");
+		}
+		{
+			Builder.BuilderWithTableName termConceptPropertyTable = version.onTable("TRM_CONCEPT_PROPERTY");
+
+			termConceptPropertyTable
+					.addColumn("20240409.1", "PROP_VAL_BIN")
+					.nullable()
+					.type(ColumnTypeEnum.BINARY);
+
+			termConceptPropertyTable.migrateBlobToBinary("20240409.2", "PROP_VAL_LOB", "PROP_VAL_BIN");
+		}
+		{
+			Builder.BuilderWithTableName termValueSetConceptTable = version.onTable("TRM_VALUESET_CONCEPT");
+
+			termValueSetConceptTable
+					.addColumn("20240409.3", "SOURCE_DIRECT_PARENT_PIDS_VC")
+					.nullable()
+					.type(ColumnTypeEnum.TEXT);
+
+			termValueSetConceptTable.migrateClobToText(
+					"20240409.4", "SOURCE_DIRECT_PARENT_PIDS", "SOURCE_DIRECT_PARENT_PIDS_VC");
+		}
+		{
+			Builder.BuilderWithTableName termConceptTable = version.onTable("TRM_CONCEPT");
+
+			termConceptTable
+					.addColumn("20240410.1", "PARENT_PIDS_VC")
+					.nullable()
+					.type(ColumnTypeEnum.TEXT);
+
+			termConceptTable.migrateClobToText("20240410.2", "PARENT_PIDS", "PARENT_PIDS_VC");
+		}
 	}

 	protected void init700() {
@@ -2489,10 +2545,26 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 				.modifyColumn("20190722.26", "SYSTEM_VERSION")
 				.nullable()
 				.withType(ColumnTypeEnum.STRING, 200);
+
+		/*
+		DISABLED THIS STEP (20190722.27) ON PURPOSE BECAUSE IT STARTED CAUSING FAILURES ON MSSQL FOR A FRESH DB.
+		I left it here for historical purposes.
+		The reason for the failure is as follows. The TARGET_CODE column was originally 'not nullable' when it was
+		first introduced, and in 7_2_0 it is being changed to a nullable column (see 20240327.1 in init720()).
+		Starting with 7_2_0, on a fresh db, we create the table with a nullable TARGET_CODE (as it is made nullable now).
+		Since we run all migration steps on a fresh db, this step would try to convert the column, which is created as
+		nullable, to not nullable (and it would then need to be converted back to nullable in the 7_2_0 migration).
+		Changing a nullable column to not nullable is not allowed in
+		MSSQL if there is an index on the column, which is the case here, as there is IDX_CNCPT_MP_GRP_ELM_TGT_CD
+		on this column. Since init720() has the right migration
+		step, where the column is set to nullable and has the right type and length, this statement is also
+		not necessary anymore even for non-fresh dbs.
 		version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
 				.modifyColumn("20190722.27", "TARGET_CODE")
 				.nonNullable()
 				.withType(ColumnTypeEnum.STRING, 500);
+		*/
 		version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
 				.modifyColumn("20190722.28", "VALUESET_URL")
 				.nullable()
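All four migration blocks above follow one recipe: add a nullable BINARY/TEXT column beside the LOB, copy the data across, and leave the old column in place so a rollback still finds it. A hypothetical extra step using the same builder API, with invented table, column, and version names:

// Hypothetical illustration only; "EXAMPLE_TABLE", "VAL_LOB", "VAL_BIN" and the
// "20240501.x" version IDs are invented, but the builder calls mirror those above.
Builder.BuilderWithTableName exampleTable = version.onTable("EXAMPLE_TABLE");
exampleTable
        .addColumn("20240501.1", "VAL_BIN") // new nullable binary column beside the LOB
        .nullable()
        .type(ColumnTypeEnum.BINARY);
exampleTable.migrateBlobToBinary("20240501.2", "VAL_LOB", "VAL_BIN"); // copy legacy rows across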
@@ -243,7 +243,7 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPackageCacheManager {
      */
     private byte[] fetchBlobFromBinary(IBaseBinary theBinary) throws IOException {
         if (myBinaryStorageSvc != null && !(myBinaryStorageSvc instanceof NullBinaryStorageSvcImpl)) {
-            return myBinaryStorageSvc.fetchDataBlobFromBinary(theBinary);
+            return myBinaryStorageSvc.fetchDataByteArrayFromBinary(theBinary);
         } else {
             byte[] value = BinaryUtil.getOrCreateData(myCtx, theBinary).getValue();
             if (value == null) {
@@ -0,0 +1,81 @@
+package ca.uhn.fhir.jpa.entity;
+
+import com.google.common.base.Strings;
+import org.junit.jupiter.api.Test;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.startsWith;
+
+public class TermConceptPropertyTest {
+
+    private static final String ourVeryLongString = Strings.repeat("a", TermConceptProperty.MAX_LENGTH + 1);
+
+    @Test
+    public void testSetValue_whenValueExceedsMAX_LENGTH_willWriteToBlobAndBin() {
+        // given
+        TermConceptProperty termConceptProperty = new TermConceptProperty();
+
+        // when
+        termConceptProperty.setValue(ourVeryLongString);
+
+        // then
+        assertThat(termConceptProperty.getValueBlobForTesting(), notNullValue());
+        assertThat(termConceptProperty.getValueBinForTesting(), notNullValue());
+    }
+
+    @Test
+    public void testHasValueBin_willDefaultToAssertingValueBin() {
+        // given
+        TermConceptProperty termConceptProperty = new TermConceptProperty();
+        termConceptProperty.setValueBinForTesting(ourVeryLongString.getBytes());
+        termConceptProperty.setValueBlobForTesting(null);
+
+        // when/then
+        assertThat(termConceptProperty.hasValueBin(), is(true));
+    }
+
+    @Test
+    public void testHasValueBin_willAssertValueBlob_whenValueBinNotPresent() {
+        // given
+        TermConceptProperty termConceptProperty = new TermConceptProperty();
+        termConceptProperty.setValueBinForTesting(null);
+        termConceptProperty.setValueBlobForTesting(ourVeryLongString.getBytes());
+
+        // when/then
+        assertThat(termConceptProperty.hasValueBin(), is(true));
+    }
+
+    @Test
+    public void testGetValue_whenValueExceedsMAX_LENGTH_willGetValueBinByDefault() {
+        // given
+        TermConceptProperty termConceptProperty = new TermConceptProperty();
+        termConceptProperty.setValueBinForTesting(ourVeryLongString.getBytes());
+        termConceptProperty.setValueBlobForTesting(null);
+
+        // when
+        String value = termConceptProperty.getValue();
+
+        // then
+        assertThat(value, startsWith("a"));
+    }
+
+    @Test
+    public void testGetValue_whenOnlyValueBlobIsSet_willGetValueValueBlob() {
+        // given
+        TermConceptProperty termConceptProperty = new TermConceptProperty();
+        termConceptProperty.setValueBinForTesting(null);
+        termConceptProperty.setValueBlobForTesting(ourVeryLongString.getBytes());
+
+        // when
+        String value = termConceptProperty.getValue();
+
+        // then
+        assertThat(value, startsWith("a"));
+    }
+}
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.1.7-SNAPSHOT</version>
+		<version>7.3.0-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -3,7 +3,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.1.7-SNAPSHOT</version>
+		<version>7.3.0-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -3,7 +3,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.1.7-SNAPSHOT</version>
+		<version>7.3.0-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.1.7-SNAPSHOT</version>
+		<version>7.3.0-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.1.7-SNAPSHOT</version>
+		<version>7.3.0-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -26,37 +26,44 @@ import jakarta.persistence.Lob;
 import jakarta.persistence.Table;
 import jakarta.persistence.Temporal;
 import jakarta.persistence.TemporalType;
+import org.hibernate.Length;

 import java.sql.Blob;
 import java.util.Date;

+import static java.util.Objects.nonNull;

 @Entity
-@Table(name = "HFJ_BINARY_STORAGE_BLOB")
+@Table(name = "HFJ_BINARY_STORAGE")
 public class BinaryStorageEntity {

     @Id
-    @Column(name = "BLOB_ID", length = 200, nullable = false)
-    // N.B GGG: Note that the `blob id` is the same as the `externalized binary id`.
-    private String myBlobId;
+    @Column(name = "CONTENT_ID", length = 200, nullable = false)
+    // N.B GGG: Note that the `content id` is the same as the `externalized binary id`.
+    private String myContentId;

     @Column(name = "RESOURCE_ID", length = 100, nullable = false)
     private String myResourceId;

-    @Column(name = "BLOB_SIZE", nullable = true)
+    @Column(name = "CONTENT_SIZE", nullable = true)
     private long mySize;

     @Column(name = "CONTENT_TYPE", nullable = false, length = 100)
-    private String myBlobContentType;
+    private String myContentType;

-    @Lob
-    @Column(name = "BLOB_DATA", nullable = false, insertable = true, updatable = false)
+    @Deprecated(since = "7.2.0")
+    @Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later
+    @Column(name = "BLOB_DATA", nullable = true, insertable = true, updatable = false)
     private Blob myBlob;

+    @Column(name = "STORAGE_CONTENT_BIN", nullable = true, length = Length.LONG32)
+    private byte[] myStorageContentBin;

     @Temporal(TemporalType.TIMESTAMP)
     @Column(name = "PUBLISHED_DATE", nullable = false)
     private Date myPublished;

-    @Column(name = "BLOB_HASH", length = 128, nullable = true)
+    @Column(name = "CONTENT_HASH", length = 128, nullable = true)
     private String myHash;

     public Date getPublished() {
@@ -71,8 +78,8 @@ public class BinaryStorageEntity {
         return myHash;
     }

-    public void setBlobId(String theBlobId) {
-        myBlobId = theBlobId;
+    public void setContentId(String theContentId) {
+        myContentId = theContentId;
     }

     public void setResourceId(String theResourceId) {
@@ -83,12 +90,12 @@
         return mySize;
     }

-    public String getBlobContentType() {
-        return myBlobContentType;
+    public String getContentType() {
+        return myContentType;
     }

-    public void setBlobContentType(String theBlobContentType) {
-        myBlobContentType = theBlobContentType;
+    public void setContentType(String theContentType) {
+        myContentType = theContentType;
     }

     public Blob getBlob() {
@@ -99,8 +106,8 @@
         myBlob = theBlob;
     }

-    public String getBlobId() {
-        return myBlobId;
+    public String getContentId() {
+        return myContentId;
     }

     public void setSize(long theSize) {
@@ -110,4 +117,21 @@
     public void setHash(String theHash) {
         myHash = theHash;
     }
+
+    public byte[] getStorageContentBin() {
+        return myStorageContentBin;
+    }
+
+    public BinaryStorageEntity setStorageContentBin(byte[] theStorageContentBin) {
+        myStorageContentBin = theStorageContentBin;
+        return this;
+    }
+
+    public boolean hasStorageContent() {
+        return nonNull(myStorageContentBin);
+    }
+
+    public boolean hasBlob() {
+        return nonNull(myBlob);
+    }
 }
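Callers can use the two new predicates to pick a read path during the transition; a rough sketch of the fallback a storage service might apply (this helper class is illustrative, not part of the changeset):

import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;

// Illustrative fallback reader: prefer the new STORAGE_CONTENT_BIN column and
// only open the legacy java.sql.Blob when an old row has no binary content.
class BinaryContentReader {
    byte[] readContent(BinaryStorageEntity theEntity) throws SQLException, IOException {
        if (theEntity.hasStorageContent()) {
            return theEntity.getStorageContentBin(); // rows written since 7.2.0
        }
        if (theEntity.hasBlob()) {
            try (InputStream contentStream = theEntity.getBlob().getBinaryStream()) {
                return contentStream.readAllBytes(); // legacy rows still in BLOB_DATA
            }
        }
        return null; // nothing stored for this row
    }
}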
@@ -36,6 +36,7 @@ import jakarta.persistence.Index;
 import jakarta.persistence.JoinColumn;
 import jakarta.persistence.ManyToOne;
 import jakarta.persistence.PrePersist;
+import jakarta.persistence.PreUpdate;
 import jakarta.persistence.SequenceGenerator;
 import jakarta.persistence.Table;
 import org.apache.commons.lang3.StringUtils;
@@ -429,6 +430,7 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchParam {
      * We don't truncate earlier in the flow because the index hashes MUST be calculated on the full string.
      */
     @PrePersist
+    @PreUpdate
     public void truncateFieldsForDB() {
         mySystem = StringUtils.truncate(mySystem, MAX_LENGTH);
         myValue = StringUtils.truncate(myValue, MAX_LENGTH);
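The javadoc above states the ordering constraint this hook preserves: index hashes are derived from the full value, and only the stored copy is truncated just before the row is written, now on updates as well as inserts. A toy illustration of that ordering, with String.hashCode() standing in for the real hash calculation and 200 for MAX_LENGTH:

import org.apache.commons.lang3.StringUtils;

// Toy example of "hash first, truncate at write time"; hashCode() and the 200
// limit are stand-ins, not the values used by the entity above.
class TruncateAfterHashExample {
    public static void main(String[] args) {
        String fullValue = "x".repeat(500);
        int hash = fullValue.hashCode();                      // computed on the untruncated value
        String stored = StringUtils.truncate(fullValue, 200); // only the column copy is shortened
        System.out.println(hash + " / stored length = " + stored.length());
    }
}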
@@ -97,7 +97,7 @@ public class StorageSettings {
     private boolean myDefaultSearchParamsCanBeOverridden = true;
     private Set<Subscription.SubscriptionChannelType> mySupportedSubscriptionTypes = new HashSet<>();
     private boolean myAutoCreatePlaceholderReferenceTargets;
-    private boolean myCrossPartitionSubscriptionEnabled = false;
+    private boolean myCrossPartitionSubscriptionEnabled = true;
     private Integer myBundleBatchPoolSize = DEFAULT_BUNDLE_BATCH_POOL_SIZE;
     private Integer myBundleBatchMaxPoolSize = DEFAULT_BUNDLE_BATCH_MAX_POOL_SIZE;
     private boolean myEnableInMemorySubscriptionMatching = true;
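Note the behavioural change here: cross-partition subscriptions are now enabled by default. A deployment that depended on the old default would have to opt out explicitly, along these lines:

StorageSettings storageSettings = new StorageSettings();
// Restore the previous default of rejecting cross-partition subscriptions
storageSettings.setCrossPartitionSubscriptionEnabled(false);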
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.1.7-SNAPSHOT</version>
+		<version>7.3.0-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.1.7-SNAPSHOT</version>
+		<version>7.3.0-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -20,6 +20,7 @@
 package ca.uhn.fhir.jpa.subscription.model.config;

 import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.model.entity.StorageSettings;
 import ca.uhn.fhir.jpa.subscription.match.matcher.matching.SubscriptionStrategyEvaluator;
 import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionCanonicalizer;
 import org.springframework.context.annotation.Bean;
@@ -29,8 +30,9 @@ import org.springframework.context.annotation.Configuration;
 public class SubscriptionModelConfig {

     @Bean
-    public SubscriptionCanonicalizer subscriptionCanonicalizer(FhirContext theFhirContext) {
-        return new SubscriptionCanonicalizer(theFhirContext);
+    public SubscriptionCanonicalizer subscriptionCanonicalizer(
+            FhirContext theFhirContext, StorageSettings theStorageSettings) {
+        return new SubscriptionCanonicalizer(theFhirContext, theStorageSettings);
     }

     @Bean
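Outside of Spring wiring, the new two-argument constructor is called the same way the updated tests later in this diff call it; a minimal sketch:

FhirContext fhirContext = FhirContext.forR4Cached();
StorageSettings storageSettings = new StorageSettings();
// The canonicalizer now consults StorageSettings when canonicalizing subscriptions
SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(fhirContext, storageSettings);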
@@ -4,6 +4,7 @@ import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.model.entity.StorageSettings;
 import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionCanonicalizer;
 import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
 import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedJsonMessage;
@@ -50,7 +51,7 @@ public class SubscriptionRegisteringSubscriberTest {
     @Mock
     private SubscriptionRegistry mySubscriptionRegistry;

     @Spy
-    private SubscriptionCanonicalizer mySubscriptionCanonicalizer = new SubscriptionCanonicalizer(myFhirContext);
+    private SubscriptionCanonicalizer mySubscriptionCanonicalizer = new SubscriptionCanonicalizer(myFhirContext, new StorageSettings());

     @Mock
     private DaoRegistry myDaoRegistry;

     @Mock
@@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.subscription.match.registry;

 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
+import ca.uhn.fhir.jpa.model.entity.StorageSettings;
 import ca.uhn.fhir.jpa.subscription.channel.subscription.ISubscriptionDeliveryChannelNamer;
 import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelRegistry;
 import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription;
@@ -33,7 +34,7 @@ public class SubscriptionRegistryTest {
     static FhirContext ourFhirContext = FhirContext.forR4Cached();

     @Spy
-    SubscriptionCanonicalizer mySubscriptionCanonicalizer = new SubscriptionCanonicalizer(ourFhirContext);
+    SubscriptionCanonicalizer mySubscriptionCanonicalizer = new SubscriptionCanonicalizer(ourFhirContext, new StorageSettings());

     @Spy
     ISubscriptionDeliveryChannelNamer mySubscriptionDeliveryChannelNamer = new TestChannelNamer();
@@ -1,6 +1,7 @@
 package ca.uhn.fhir.jpa.subscription.module;

 import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.model.entity.StorageSettings;
 import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription;
 import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionCanonicalizer;
 import ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryJsonMessage;
@@ -8,12 +9,15 @@ import ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage;
 import ca.uhn.fhir.util.HapiExtensions;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import jakarta.annotation.Nonnull;
 import org.assertj.core.util.Lists;
 import org.hamcrest.Matchers;
 import org.hl7.fhir.r4.model.BooleanType;
 import org.hl7.fhir.r4.model.StringType;
 import org.hl7.fhir.r4.model.Subscription;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -66,7 +70,7 @@ public class CanonicalSubscriptionTest {
     @Test
     public void testCanonicalSubscriptionRetainsMetaTags() throws IOException {
-        SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4());
+        SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4(), new StorageSettings());
         CanonicalSubscription sub1 = canonicalizer.canonicalize(makeMdmSubscription());
         assertTrue(sub1.getTags().keySet().contains(TAG_SYSTEM));
         assertEquals(sub1.getTags().get(TAG_SYSTEM), TAG_VALUE);
@@ -74,7 +78,7 @@ public class CanonicalSubscriptionTest {
     @Test
     public void emailDetailsEquals() {
-        SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4());
+        SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4(), new StorageSettings());
         CanonicalSubscription sub1 = canonicalizer.canonicalize(makeEmailSubscription());
         CanonicalSubscription sub2 = canonicalizer.canonicalize(makeEmailSubscription());
         assertTrue(sub1.equals(sub2));
@@ -82,7 +86,7 @@
     @Test
     public void testSerializeMultiPartitionSubscription(){
-        SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4());
+        SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4(), new StorageSettings());
         Subscription subscription = makeEmailSubscription();
         subscription.addExtension(HapiExtensions.EXTENSION_SUBSCRIPTION_CROSS_PARTITION, new BooleanType().setValue(true));
         CanonicalSubscription canonicalSubscription = canonicalizer.canonicalize(subscription);
@@ -90,28 +94,30 @@
         assertEquals(canonicalSubscription.getCrossPartitionEnabled(), true);
     }

-    @Test
-    public void testSerializeIncorrectMultiPartitionSubscription(){
-        SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4());
+    @ParameterizedTest
+    @ValueSource(booleans = {true, false})
+    public void testSerializeIncorrectMultiPartitionSubscription(boolean theIsCrossPartitionEnabled){
+        final StorageSettings storageSettings = buildStorageSettings(theIsCrossPartitionEnabled);
+        SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4(), storageSettings);
         Subscription subscription = makeEmailSubscription();
         subscription.addExtension(HapiExtensions.EXTENSION_SUBSCRIPTION_CROSS_PARTITION, new StringType().setValue("false"));
         CanonicalSubscription canonicalSubscription = canonicalizer.canonicalize(subscription);
-        System.out.print(canonicalSubscription);
-        assertEquals(canonicalSubscription.getCrossPartitionEnabled(), false);
+        assertEquals(canonicalSubscription.getCrossPartitionEnabled(), theIsCrossPartitionEnabled);
     }

-    @Test
-    public void testSerializeNonMultiPartitionSubscription(){
-        SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4());
+    @ParameterizedTest
+    @ValueSource(booleans = {true, false})
+    public void testSerializeNonMultiPartitionSubscription(boolean theIsCrossPartitionEnabled){
+        final StorageSettings storageSettings = buildStorageSettings(theIsCrossPartitionEnabled);
+        SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4(), storageSettings);
         Subscription subscription = makeEmailSubscription();
         subscription.addExtension(HapiExtensions.EXTENSION_SUBSCRIPTION_CROSS_PARTITION, new BooleanType().setValue(false));
         CanonicalSubscription canonicalSubscription = canonicalizer.canonicalize(subscription);
         System.out.print(canonicalSubscription);
-        assertEquals(canonicalSubscription.getCrossPartitionEnabled(), false);
+        assertEquals(canonicalSubscription.getCrossPartitionEnabled(), theIsCrossPartitionEnabled);
     }

     @Test
@@ -154,4 +160,11 @@
         ResourceDeliveryMessage payload = resourceDeliveryMessage.getPayload();
         return payload.getSubscription();
     }
+
+    @Nonnull
+    private static StorageSettings buildStorageSettings(boolean theIsCrossPartitionEnabled) {
+        final StorageSettings storageSettings = new StorageSettings();
+        storageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled);
+        return storageSettings;
+    }
 }
@@ -27,6 +27,8 @@ import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Nested;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
 import org.mockito.Answers;
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
@@ -38,6 +40,7 @@ import java.util.Optional;
 import static ca.uhn.fhir.jpa.subscription.match.matcher.subscriber.SubscriptionCriteriaParser.TypeEnum.STARTYPE_EXPRESSION;
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.Mockito.atLeastOnce;
@@ -227,8 +230,10 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
         ourObservationListener.awaitExpected();
     }

-    @Test
-    public void testSubscriptionAndResourceOnDiffPartitionNotMatch() throws InterruptedException {
+    @ParameterizedTest
+    @ValueSource(booleans = {true, false})
+    public void testSubscriptionAndResourceOnDiffPartitionNotMatch(boolean theIsCrossPartitionEnabled) throws InterruptedException {
+        myStorageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled);
         myPartitionSettings.setPartitioningEnabled(true);
         String payload = "application/fhir+json";
@@ -240,13 +245,18 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
         mockSubscriptionRead(requestPartitionId, subscription);
         sendSubscription(subscription, requestPartitionId, true);

-        mySubscriptionResourceNotMatched.setExpectedCount(1);
-        sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionId(0));
-        mySubscriptionResourceNotMatched.awaitExpected();
+        final ThrowsInterrupted throwsInterrupted = () -> sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionId(0));
+
+        if (theIsCrossPartitionEnabled) {
+            runWithinLatchLogicExpectSuccess(throwsInterrupted);
+        } else {
+            runWithLatchLogicExpectFailure(throwsInterrupted);
+        }
     }

-    @Test
-    public void testSubscriptionAndResourceOnDiffPartitionNotMatchPart2() throws InterruptedException {
+    @ParameterizedTest
+    @ValueSource(booleans = {true, false})
+    public void testSubscriptionAndResourceOnDiffPartitionNotMatchPart2(boolean theIsCrossPartitionEnabled) throws InterruptedException {
+        myStorageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled);
         myPartitionSettings.setPartitioningEnabled(true);
         String payload = "application/fhir+json";
@@ -258,13 +268,19 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
         mockSubscriptionRead(requestPartitionId, subscription);
         sendSubscription(subscription, requestPartitionId, true);

-        mySubscriptionResourceNotMatched.setExpectedCount(1);
-        sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionId(1));
-        mySubscriptionResourceNotMatched.awaitExpected();
+        final ThrowsInterrupted throwsInterrupted = () -> sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionId(1));
+
+        if (theIsCrossPartitionEnabled) {
+            runWithinLatchLogicExpectSuccess(throwsInterrupted);
+        } else {
+            runWithLatchLogicExpectFailure(throwsInterrupted);
+        }
     }

-    @Test
-    public void testSubscriptionOnDefaultPartitionAndResourceOnDiffPartitionNotMatch() throws InterruptedException {
+    @ParameterizedTest
+    @ValueSource(booleans = {true, false})
+    public void testSubscriptionOnDefaultPartitionAndResourceOnDiffPartitionNotMatch(boolean theIsCrossPartitionEnabled) throws InterruptedException {
+        myStorageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled);
         myPartitionSettings.setPartitioningEnabled(true);
         String payload = "application/fhir+json";
@@ -276,13 +292,19 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
         mockSubscriptionRead(requestPartitionId, subscription);
         sendSubscription(subscription, requestPartitionId, true);

-        mySubscriptionResourceNotMatched.setExpectedCount(1);
-        sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionId(1));
-        mySubscriptionResourceNotMatched.awaitExpected();
+        final ThrowsInterrupted throwsInterrupted = () -> sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionId(1));
+
+        if (theIsCrossPartitionEnabled) {
+            runWithinLatchLogicExpectSuccess(throwsInterrupted);
+        } else {
+            runWithLatchLogicExpectFailure(throwsInterrupted);
+        }
     }

-    @Test
-    public void testSubscriptionOnAPartitionAndResourceOnDefaultPartitionNotMatch() throws InterruptedException {
+    @ParameterizedTest
+    @ValueSource(booleans = {true, false})
+    public void testSubscriptionOnAPartitionAndResourceOnDefaultPartitionNotMatch(boolean theIsCrossPartitionEnabled) throws InterruptedException {
+        myStorageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled);
         myPartitionSettings.setPartitioningEnabled(true);
         String payload = "application/fhir+json";
@@ -294,9 +316,13 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
         mockSubscriptionRead(requestPartitionId, subscription);
         sendSubscription(subscription, requestPartitionId, true);

-        mySubscriptionResourceNotMatched.setExpectedCount(1);
-        sendObservation(code, "SNOMED-CT", RequestPartitionId.defaultPartition());
-        mySubscriptionResourceNotMatched.awaitExpected();
+        final ThrowsInterrupted throwsInterrupted = () -> sendObservation(code, "SNOMED-CT", RequestPartitionId.defaultPartition());
+
+        if (theIsCrossPartitionEnabled) {
+            runWithinLatchLogicExpectSuccess(throwsInterrupted);
+        } else {
+            runWithLatchLogicExpectFailure(throwsInterrupted);
+        }
     }

     @Test
@@ -320,8 +346,10 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
         ourObservationListener.awaitExpected();
     }

-    @Test
-    public void testSubscriptionOnOnePartitionDoNotMatchResourceOnMultiplePartitions() throws InterruptedException {
+    @ParameterizedTest
+    @ValueSource(booleans = {true, false})
+    public void testSubscriptionOnOnePartitionDoNotMatchResourceOnMultiplePartitions(boolean theIsCrossPartitionEnabled) throws InterruptedException {
+        myStorageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled);
         myPartitionSettings.setPartitioningEnabled(true);
         String payload = "application/fhir+json";
@@ -333,10 +361,13 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
         mockSubscriptionRead(requestPartitionId, subscription);
         sendSubscription(subscription, requestPartitionId, true);

-        mySubscriptionResourceNotMatched.setExpectedCount(1);
-        List<Integer> partitionId = Collections.synchronizedList(Lists.newArrayList(0, 2));
-        sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionIds(partitionId));
-        mySubscriptionResourceNotMatched.awaitExpected();
+        final ThrowsInterrupted throwsInterrupted = () -> sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionIds(Collections.synchronizedList(Lists.newArrayList(0, 2))));
+
+        if (theIsCrossPartitionEnabled) {
+            runWithinLatchLogicExpectSuccess(throwsInterrupted);
+        } else {
+            runWithLatchLogicExpectFailure(throwsInterrupted);
+        }
     }

     @Test
@@ -519,4 +550,31 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
             verify(message, atLeastOnce()).getPayloadId(null);
         }
     }
+
+    private interface ThrowsInterrupted {
+        void runOrThrow() throws InterruptedException;
+    }
+
+    private void runWithLatchLogicExpectFailure(ThrowsInterrupted theRunnable) {
+        try {
+            mySubscriptionResourceNotMatched.setExpectedCount(1);
+            theRunnable.runOrThrow();
+            mySubscriptionResourceNotMatched.awaitExpected();
+        } catch (InterruptedException exception) {
+            Thread.currentThread().interrupt();
+        }
+    }
+
+    private void runWithinLatchLogicExpectSuccess(ThrowsInterrupted theRunnable) {
+        try {
+            ourObservationListener.setExpectedCount(1);
+            mySubscriptionResourceMatched.setExpectedCount(1);
+            theRunnable.runOrThrow();
+            mySubscriptionResourceMatched.awaitExpected();
+            ourObservationListener.awaitExpected();
+        } catch (InterruptedException exception) {
+            fail();
+            Thread.currentThread().interrupt();
+        }
    }
 }
Some files were not shown because too many files have changed in this diff.