Merge branch 'master' into mb-fix-history-prefetch

# Conflicts:
#	hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java

commit 410aeb6232
@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
    <version>7.1.7-SNAPSHOT</version>
    <version>7.3.0-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>7.1.7-SNAPSHOT</version>
    <version>7.3.0-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>7.1.7-SNAPSHOT</version>
    <version>7.3.0-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -138,6 +138,11 @@
      <artifactId>system-stubs-jupiter</artifactId>
      <scope>test</scope>
    </dependency>
    <!-- OpenTelemetry -->
    <dependency>
      <groupId>io.opentelemetry</groupId>
      <artifactId>opentelemetry-api</artifactId>
    </dependency>
  </dependencies>

  <build>
@@ -1080,10 +1080,52 @@ public enum Pointcut implements IPointcut {
SUBSCRIPTION_TOPIC_AFTER_PERSISTED_RESOURCE_CHECKED(
void.class, "ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage"),

/**
* <b>Storage Hook:</b>
* Invoked when a Bulk Export job is being kicked off, but before any permission checks
* have been done.
* This hook can be used to modify or update parameters as need be before
* authorization/permission checks are done.
* <p>
* Hooks may accept the following parameters:
* </p>
* <ul>
* <li>
* ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions - The details of the job being kicked off
* </li>
* <li>
* ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
* pulled out of the servlet request. Note that the bean
* properties are not all guaranteed to be populated, depending on how early during processing the
* exception occurred. <b>Note that this parameter may be null in contexts where the request is not
* known, such as while processing searches</b>
* </li>
* <li>
* ca.uhn.fhir.rest.server.servlet.ServletRequestDetails - A bean containing details about the request that is about to be processed, including details such as the
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
* pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will
* only be populated when operating in a RestfulServer implementation. It is provided as a convenience.
* </li>
* </ul>
* <p>
* Hooks should return <code>void</code>, and can throw exceptions.
* </p>
*/
STORAGE_PRE_INITIATE_BULK_EXPORT(
void.class,
"ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters",
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"),

/**
* <b>Storage Hook:</b>
* Invoked when a Bulk Export job is being kicked off. Hook methods may modify
* the request, or raise an exception to prevent it from being initiated.
*
* This hook is not guaranteed to be called before permission checks, and so
* any implementers should be cautious of changing the options in ways that would
* affect permissions.
* <p>
* Hooks may accept the following parameters:
* </p>
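A minimal sketch of an interceptor that uses the new pointcut (not part of this commit; the @Interceptor/@Hook wiring and the parameter types follow the Javadoc above, while the BulkExportJobParameters accessors used here are an assumption for illustration):

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;

import java.util.List;

@Interceptor
public class BulkExportDefaultsInterceptor {

    // Runs before STORAGE_INITIATE_BULK_EXPORT and before permission checks, so any
    // parameter tweaks made here are visible to later authorization logic.
    @Hook(Pointcut.STORAGE_PRE_INITIATE_BULK_EXPORT)
    public void beforeInitiateBulkExport(BulkExportJobParameters theParameters, RequestDetails theRequestDetails) {
        // getResourceTypes()/setResourceTypes(..) are assumed accessors, shown only to illustrate a tweak
        if (theParameters.getResourceTypes() == null || theParameters.getResourceTypes().isEmpty()) {
            theParameters.setResourceTypes(List.of("Patient", "Observation"));
        }
    }
}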
@@ -2909,7 +2951,9 @@ public enum Pointcut implements IPointcut {
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
"ca.uhn.fhir.jpa.util.SqlQueryList"),

@Deprecated(since = "7.2.0 - Use STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX instead.")
/**
* <b> Deprecated but still supported. Will eventually be removed. <code>Please use Pointcut.STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX</code> </b>
* <b> Binary Blob Prefix Assigning Hook:</b>
* <p>
* Immediately before a binary blob is stored to its eventual data sink, this hook is called.
@@ -2935,6 +2979,32 @@ public enum Pointcut implements IPointcut {
"ca.uhn.fhir.rest.api.server.RequestDetails",
"org.hl7.fhir.instance.model.api.IBaseResource"),

/**
* <b> Binary Content Prefix Assigning Hook:</b>
* <p>
* Immediately before binary content is stored to its eventual data sink, this hook is called.
* This hook allows implementers to provide a prefix to the binary content's ID.
* This is helpful in cases where you want to identify this blob for later retrieval outside of HAPI-FHIR. Note that allowable characters will depend on the specific storage sink being used.
* <ul>
* <li>
* ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
* pulled out of the servlet request. Note that the bean
* properties are not all guaranteed to be populated.
* </li>
* <li>
* org.hl7.fhir.instance.model.api.IBaseBinary - The binary resource that is about to be stored.
* </li>
* </ul>
* <p>
* Hooks should return <code>String</code>, which represents the full prefix to be applied to the blob.
* </p>
*/
STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX(
String.class,
"ca.uhn.fhir.rest.api.server.RequestDetails",
"org.hl7.fhir.instance.model.api.IBaseResource"),

/**
* <b>Storage Hook:</b>
* Invoked before a batch job is persisted to the database.
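A hypothetical hook for the new prefix-assigning pointcut (the String return type and the two parameters mirror the Javadoc above; the prefix scheme itself is only an illustration):

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;

@Interceptor
public class BinaryContentIdPrefixInterceptor {

    @Hook(Pointcut.STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX)
    public String assignBinaryContentIdPrefix(RequestDetails theRequestDetails, IBaseResource theResource) {
        // Prefix binary content IDs with the owning resource type, e.g. "DocumentReference-"
        return theResource.fhirType() + "-";
    }
}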
@@ -31,6 +31,8 @@ import ca.uhn.fhir.util.ReflectionUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.instrumentation.annotations.WithSpan;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import org.apache.commons.lang3.Validate;

@@ -547,7 +549,7 @@ public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & I
// Invoke the method
try {
return myMethod.invoke(getInterceptor(), args);
return invokeMethod(args);
} catch (InvocationTargetException e) {
Throwable targetException = e.getTargetException();
if (myPointcut.isShouldLogAndSwallowException(targetException)) {

@@ -566,6 +568,19 @@ public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & I
throw new InternalErrorException(Msg.code(1911) + e);
}
}

@WithSpan("hapifhir.interceptor")
private Object invokeMethod(Object[] args) throws InvocationTargetException, IllegalAccessException {
// Add attributes to the opentelemetry span
Span currentSpan = Span.current();
currentSpan.setAttribute("hapifhir.interceptor.pointcut_name", myPointcut.name());
currentSpan.setAttribute(
"hapifhir.interceptor.class_name",
myMethod.getDeclaringClass().getName());
currentSpan.setAttribute("hapifhir.interceptor.method_name", myMethod.getName());

return myMethod.invoke(getInterceptor(), args);
}
}

protected class HookDescriptor {
@@ -609,12 +609,14 @@ public abstract class BaseParser implements IParser {
private boolean isStripVersionsFromReferences(
CompositeChildElement theCompositeChildElement, IBaseResource theResource) {

Set<String> autoVersionReferencesAtPathExtensions =
MetaUtil.getAutoVersionReferencesAtPath(theResource.getMeta(), myContext.getResourceType(theResource));
if (theResource != null) {
Set<String> autoVersionReferencesAtPathExtensions = MetaUtil.getAutoVersionReferencesAtPath(
theResource.getMeta(), myContext.getResourceType(theResource));

if (!autoVersionReferencesAtPathExtensions.isEmpty()
&& theCompositeChildElement.anyPathMatches(autoVersionReferencesAtPathExtensions)) {
return false;
if (!autoVersionReferencesAtPathExtensions.isEmpty()
&& theCompositeChildElement.anyPathMatches(autoVersionReferencesAtPathExtensions)) {
return false;
}
}

Boolean stripVersionsFromReferences = myStripVersionsFromReferences;

@@ -622,21 +624,20 @@ public abstract class BaseParser implements IParser {
return stripVersionsFromReferences;
}

if (myContext.getParserOptions().isStripVersionsFromReferences() == false) {
if (!myContext.getParserOptions().isStripVersionsFromReferences()) {
return false;
}

Set<String> dontStripVersionsFromReferencesAtPaths = myDontStripVersionsFromReferencesAtPaths;
if (dontStripVersionsFromReferencesAtPaths != null) {
if (dontStripVersionsFromReferencesAtPaths.isEmpty() == false
&& theCompositeChildElement.anyPathMatches(dontStripVersionsFromReferencesAtPaths)) {
return false;
}
if (dontStripVersionsFromReferencesAtPaths != null
&& !dontStripVersionsFromReferencesAtPaths.isEmpty()
&& theCompositeChildElement.anyPathMatches(dontStripVersionsFromReferencesAtPaths)) {
return false;
}

dontStripVersionsFromReferencesAtPaths =
myContext.getParserOptions().getDontStripVersionsFromReferencesAtPaths();
return dontStripVersionsFromReferencesAtPaths.isEmpty() != false
return dontStripVersionsFromReferencesAtPaths.isEmpty()
|| !theCompositeChildElement.anyPathMatches(dontStripVersionsFromReferencesAtPaths);
}
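For context, the behaviour this method implements is normally driven through ParserOptions; a short sketch of that existing HAPI FHIR API (the path value is an arbitrary example):

import ca.uhn.fhir.context.FhirContext;

public class ParserVersionOptionsExample {
    public static FhirContext newContext() {
        FhirContext ctx = FhirContext.forR4();
        // Keep version suffixes on references only at specific paths
        ctx.getParserOptions().setDontStripVersionsFromReferencesAtPaths("AuditEvent.entity.what");
        // Alternatively, keep versions on all references:
        // ctx.getParserOptions().setStripVersionsFromReferences(false);
        return ctx;
    }
}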
@@ -112,6 +112,7 @@ public enum VersionEnum {
V6_2_5,
// Dev Build
V6_3_0,

V6_4_0,
V6_4_1,
V6_4_2,

@@ -119,10 +120,13 @@ public enum VersionEnum {
V6_4_4,
V6_4_5,
V6_4_6,

V6_5_0,

V6_6_0,
V6_6_1,
V6_6_2,

V6_7_0,
V6_8_0,
V6_8_1,

@@ -130,21 +134,36 @@ public enum VersionEnum {
V6_8_3,
V6_8_4,
V6_8_5,
V6_8_6,
V6_8_7,

V6_9_0,

V6_10_0,
V6_10_1,
V6_10_2,
V6_10_3,
V6_10_4,
V6_10_5,

V6_11_0,

V7_0_0,
V7_0_1,
V7_0_2,

V7_1_0,
V7_2_0;
V7_2_0,

V7_3_0,
V7_4_0;

public static VersionEnum latestVersion() {
VersionEnum[] values = VersionEnum.values();
return values[values.length - 1];
}

public boolean isNewerThan(VersionEnum theVersionEnum) {
return ordinal() > theVersionEnum.ordinal();
}
}
@@ -4,7 +4,7 @@
  <modelVersion>4.0.0</modelVersion>
  <groupId>ca.uhn.hapi.fhir</groupId>
  <artifactId>hapi-fhir-bom</artifactId>
  <version>7.1.7-SNAPSHOT</version>
  <version>7.3.0-SNAPSHOT</version>
  <packaging>pom</packaging>
  <name>HAPI FHIR BOM</name>

@@ -12,7 +12,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>7.1.7-SNAPSHOT</version>
    <version>7.3.0-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
    <version>7.1.7-SNAPSHOT</version>
    <version>7.3.0-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

@@ -4,7 +4,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>7.1.7-SNAPSHOT</version>
    <version>7.3.0-SNAPSHOT</version>
    <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
  </parent>
@@ -218,6 +218,8 @@ public class HapiFlywayMigrateDatabaseCommandTest {
"-p", "SA"
};

assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BINARY_STORAGE_BLOB"));
assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BINARY_STORAGE"));
assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RESOURCE"));
assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BLK_EXPORT_JOB"));
App.main(args);

@@ -60,6 +60,7 @@ create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varch
create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(50) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID));
create table TRM_CONCEPT_PC_LINK (PID bigint not null, CHILD_PID bigint, PARENT_PID bigint, REL_TYPE integer, CODESYSTEM_PID bigint not null, primary key (PID));
create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CONCEPT_PID bigint, primary key (PID));
create table HFJ_BINARY_STORAGE_BLOB ( BLOB_ID varchar(200) not null, BLOB_DATA blob not null, CONTENT_TYPE varchar(100) not null, BLOB_HASH varchar(128), PUBLISHED_DATE timestamp(6) not null, RESOURCE_ID varchar(100) not null, BLOB_SIZE bigint, primary key (BLOB_ID) );
create index IDX_FORCEDID_TYPE_FORCEDID on HFJ_FORCED_ID (RESOURCE_TYPE, FORCED_ID);
create unique index IDX_FORCEDID_RESID on HFJ_FORCED_ID (RESOURCE_PID);
create unique index IDX_FORCEDID_TYPE_RESID on HFJ_FORCED_ID (RESOURCE_TYPE, RESOURCE_PID);
@@ -6,7 +6,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir-cli</artifactId>
    <version>7.1.7-SNAPSHOT</version>
    <version>7.3.0-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
    <version>7.1.7-SNAPSHOT</version>
    <version>7.3.0-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

@@ -4,7 +4,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>7.1.7-SNAPSHOT</version>
    <version>7.3.0-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -4,7 +4,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>7.1.7-SNAPSHOT</version>
    <version>7.3.0-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>7.1.7-SNAPSHOT</version>
    <version>7.3.0-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
    <version>7.1.7-SNAPSHOT</version>
    <version>7.3.0-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>7.1.7-SNAPSHOT</version>
    <version>7.3.0-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>
@@ -0,0 +1,3 @@
---
release-date: "2024-01-17"
codename: "Zed"

@@ -0,0 +1,3 @@
---
release-date: "2024-01-31"
codename: "Zed"

@@ -0,0 +1,2 @@
The known issue with Bulk Export in HAPI 6.4.0 has been resolved. Bulk export functionality is now
more performant at large scale, and does not generate occasional incomplete file reports.

@@ -0,0 +1,3 @@
---
release-date: "2023-02-27"
codename: "Wizard"

@@ -0,0 +1,4 @@
---
type: fix
issue: 4636
title: "Fixed Oracle syntax problem with the fix that was previously provided via Pull Request 4630."

@@ -0,0 +1,7 @@
---
type: fix
issue: 4204
jira: SMILE-4688
backport: 6.4.2
title: "With default configuration, Resource meta.tag properties: `userSelected` and `version`, were not stored in the database.
This is now fixed."

@@ -1,4 +1,5 @@
---
type: fix
issue: 4548
backport: 6.4.2
title: "Simultaneous DELETE and $reindex operations could corrupt the search index. This has been fixed."

@@ -2,5 +2,6 @@
type: fix
issue: 5008
jira: SMILE-6727
backport: 6.4.6
title: "Previously, the GenericClient would only support the HTTP GET method for paging requests.
This has been corrected by adding the possibility for paging with an HTTP POST."

@@ -0,0 +1 @@
Fixed migration related issues

@@ -0,0 +1,3 @@
---
release-date: "2023-12-06"
codename: "Yucatán"
@@ -0,0 +1,6 @@
This release strictly attempts to remove as much usage of the LOB table as possible in postgres. Specifically, in the JPA server, several database columns related to Batch2 jobs and searching
have been reworked so that they will no longer use LOB datatypes going forward. This
is a significant advantage on Postgresql databases as it removes a significant use
of the inefficient `pg_largeobject` table, and should yield performance boosts for
MSSQL as well.
@@ -0,0 +1,3 @@
---
release-date: "2024-03-01"
codename: "Yucatán"

@@ -2,6 +2,6 @@
type: fix
issue: 5486
jira: SMILE-7457
backport: 6.10.1
backport: 6.8.6,6.10.1
title: "Previously, testing database migration with cli migrate-database command in dry-run mode would insert in the
migration task table. The issue has been fixed."

@@ -1,7 +1,7 @@
---
type: fix
issue: 5511
backport: 6.10.1
backport: 6.8.6,6.10.1
title: "Previously, when creating an index as a part of a migration, if the index already existed with a different name
on Oracle, the migration would fail. This has been fixed so that the create index migration task now recovers with
a warning message if the index already exists with a different name."

@@ -1,5 +1,6 @@
---
type: fix
issue: 5563
backport: 6.10.3
title: "Previously, certain mdm configuration could lead to duplicate eid identifier
entries in golden resources. This has been corrected"

@@ -1,6 +1,7 @@
---
type: fix
issue: 5589
backport: 6.10.5
title: "When encoding a Bundle, if resources in bundle entries had a value in
`Bundle.entry.fullUrl` but no value in `Bundle.entry.resource.id`, the parser
sometimes incorrectly moved these resources to be contained within other

@@ -1,6 +1,8 @@
---
type: fix
issue: 5589
backport: 6.10.5
title: "When encoding resources using the RDF parser, placeholder IDs (i.e. resource IDs
starting with `urn:`) were not omitted as they are in the XML and JSON parsers. This has
been corrected."

@@ -1,4 +1,5 @@
---
type: fix
issue: 5621
backport: 6.10.4
title: "Fixed a deadlock in resource conditional create."

@@ -1,5 +1,6 @@
---
type: fix
issue: 5623
backport: 6.10.5
title: "Previously, searches that used more than one chained `Bundle` `SearchParameter` (i.e. `Composition`) were only
adding one condition to the underlying SQL query which resulted in incorrect search results. This has been fixed."

@@ -0,0 +1,3 @@
---
release-date: "2024-03-01"
codename: "Zed"

@@ -1,4 +1,5 @@
---
type: fix
issue: 5671
backport: 7.0.1
title: "Avoid lock contention by refreshing SearchParameter cache in a new transaction."

@@ -1,4 +1,5 @@
---
type: security
issue: 5717
backport: 7.0.1
title: "Fixed a potential XSS vulnerability in the HAPI FHIR Testpage Overlay module."

@@ -1,6 +1,7 @@
---
type: fix
issue: 5722
backport: 7.0.1
title: "An incorrect migration script caused a failure when upgrading to HAPI FHIR 7.0.0 on
PostgreSQL if the database was not in the `public` schema. Thanks to GitHub
user @pano-smals for the contribution!"

@@ -0,0 +1,8 @@
---
type: fix
issue: 5725
jira: SMILE-8343
title: "The recommended constructor was not present on hibernate dialects
provided by HAPI FHIR, leading to warnings during startup, and failures
in some cases. This has been corrected. Thanks to GitHub user
@pano-smals for the contribution!"

@@ -1,4 +1,5 @@
---
type: fix
issue: 5742
title: Fixed behaviour of the _language query parameter. Now it is picked up as search parameter in the resource provider and filters accordingly. Thanks to Jens Villadsen (@jkiddo) for the contribution!
backport: 7.0.1
title: "Fixed behaviour of the _language query parameter. Now it is picked up as search parameter in the resource provider and filters accordingly. Thanks to Jens Villadsen (@jkiddo) for the contribution!"
@@ -1,6 +1,7 @@
---
type: perf
issue: 5748
backport: 6.8.7
title: "In the JPA server, several database columns related to Batch2 jobs and searching
have been reworked so that they will no longer use LOB datatypes going forward. This
is a significant advantage on Postgresql databases as it removes a significant use
@@ -0,0 +1,4 @@
---
type: fix
issue: 5812
title: "Previously, the 'planDefinition' resource parameter for the Dstu3 version of PlanDefinition/$apply was incorrectly defined as an R4 resource. This issue has been fixed."

@@ -0,0 +1,5 @@
---
type: change
issue: 5814
title: "Extracted methods out of ResourceProviderFactory into ObservableSupplierSet so that functionality can be used by
other services. Unit tests revealed a cleanup bug in MdmProviderLoader that is fixed in this MR."

@@ -0,0 +1,7 @@
---
type: fix
issue: 5820
title: "Unifying the code paths for Patient type export and Patient instance export.
These paths should be the same, since type is defined by spec, but instance
is just 'syntactic sugar' on top of that spec (and so should be the same).
"

@@ -0,0 +1,8 @@
---
type: fix
issue: 5828
title: "When batch 2 jobs with Reduction steps fail in the final part
of the reduction step, this would often leave the job
stuck in the FINALIZE state.
This has been fixed; the job will now FAIL.
"

@@ -0,0 +1,5 @@
---
type: perf
issue: 5838
title: "Migration of remaining database columns still using the LOB datatypes. This change effectively cuts all ties
with the inefficient `pg_largeobject` table."

@@ -0,0 +1,5 @@
---
type: fix
issue: 5842
title: "Fixed a bug where an NPE was being thrown when trying to serialize a FHIR fragment (e.g., backboneElement
, compound datatype) to a string representation if the fragment contains a `Reference`."

@@ -0,0 +1,11 @@
---
type: add
issue: 5845
title: "Added a new pointcut: STORAGE_PRE_INITIATE_BULK_EXPORT.
This pointcut is meant to be called explicitly before
STORAGE_INITIATE_BULK_EXPORT, so that parameter
manipulation that needs to be done before permission
checks (currently done using STORAGE_INITIATE_BULK_EXPORT)
can safely be done without risk of affecting permission checks/
authorization.
"

@@ -0,0 +1,6 @@
---
type: add
issue: 5850
title: "Add additional Clinical Reasoning operations $collect-data and $data-requirements to HAPI-FHIR to expand capability
for clinical reasoning module users. These operations will assist users in identifying data requirements for evaluating a measure, and what
data was used for a measure evaluation"

@@ -0,0 +1,6 @@
---
type: add
issue: 5855
title: "If using an OpenTelemetry agent, a span is now generated for each interceptor method call. The span is named
as 'hapifhir.interceptor' and it has the following attributes about the interceptor:
'hapifhir.interceptor.pointcut_name','hapifhir.interceptor.class_name', 'hapifhir.interceptor.method_name'"

@@ -0,0 +1,5 @@
---
type: fix
issue: 5856
title: "Previously, it was possible to execute the `$delete-expunge` operation on a resource even if the user did not
have delete permissions for the given resource type. This has been fixed."

@@ -0,0 +1,5 @@
---
type: fix
issue: 5874
jira: SMILE-8149
title: "Fixed a bug where 'List' would be incorrectly shown as 'ListResource' in the error response for a GET for an invalid resource."

@@ -0,0 +1,4 @@
---
type: fix
issue: 5867
title: "Previously, when adding multiple resources and defining a golden resource using MDM, the golden resource's tags were removed. This has been fixed"

@@ -0,0 +1,5 @@
---
type: fix
issue: 5877
title: "Previously, updating a tokenParam with a value greater than 200 characters would raise a SQLException.
This issue has been fixed."

@@ -0,0 +1,3 @@
---
release-date: "2024-08-18"
codename: "Copernicus"
@@ -11,7 +11,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>7.1.7-SNAPSHOT</version>
    <version>7.3.0-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -4,7 +4,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>7.1.7-SNAPSHOT</version>
    <version>7.3.0-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>

@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>7.1.7-SNAPSHOT</version>
    <version>7.3.0-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>
@@ -20,7 +20,7 @@
package ca.uhn.fhir.jpa.model.dialect;

import org.hibernate.dialect.CockroachDialect;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;

/**
* Dialect for CockroachDB database.

@@ -29,7 +29,11 @@ import org.hibernate.dialect.DatabaseVersion;
public class HapiFhirCockroachDialect extends CockroachDialect {

public HapiFhirCockroachDialect() {
super(DatabaseVersion.make(21, 1));
super();
}

public HapiFhirCockroachDialect(DialectResolutionInfo info) {
super(info);
}

/**

@@ -19,8 +19,8 @@
*/
package ca.uhn.fhir.jpa.model.dialect;

import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.DerbyDialect;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;

/**
* Dialect for Derby database.

@@ -28,12 +28,12 @@ import org.hibernate.dialect.DerbyDialect;
*/
public class HapiFhirDerbyDialect extends DerbyDialect {

public HapiFhirDerbyDialect(DatabaseVersion theDatabaseVersion) {
super(theDatabaseVersion);
public HapiFhirDerbyDialect() {
super();
}

public HapiFhirDerbyDialect() {
super(DatabaseVersion.make(10, 14, 2));
public HapiFhirDerbyDialect(DialectResolutionInfo info) {
super(info);
}

/**

@@ -19,8 +19,8 @@
*/
package ca.uhn.fhir.jpa.model.dialect;

import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;

/**
* Dialect for H2 database.

@@ -28,12 +28,12 @@ import org.hibernate.dialect.H2Dialect;
*/
public class HapiFhirH2Dialect extends H2Dialect {

public HapiFhirH2Dialect(DatabaseVersion theDatabaseVersion) {
super(theDatabaseVersion);
public HapiFhirH2Dialect() {
super();
}

public HapiFhirH2Dialect() {
super(DatabaseVersion.make(2, 2, 220));
public HapiFhirH2Dialect(DialectResolutionInfo info) {
super(info);
}

/**

@@ -19,8 +19,8 @@
*/
package ca.uhn.fhir.jpa.model.dialect;

import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.MariaDBDialect;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;

/**
* Dialect for MySQL database.

@@ -28,12 +28,12 @@ import org.hibernate.dialect.MariaDBDialect;
*/
public class HapiFhirMariaDBDialect extends MariaDBDialect {

public HapiFhirMariaDBDialect(DatabaseVersion theDatabaseVersion) {
super(theDatabaseVersion);
public HapiFhirMariaDBDialect() {
super();
}

public HapiFhirMariaDBDialect() {
super(DatabaseVersion.make(10, 11, 5));
public HapiFhirMariaDBDialect(DialectResolutionInfo info) {
super(info);
}

/**

@@ -19,8 +19,8 @@
*/
package ca.uhn.fhir.jpa.model.dialect;

import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.MySQLDialect;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;

/**
* Dialect for MySQL database.

@@ -28,12 +28,12 @@ import org.hibernate.dialect.MySQLDialect;
*/
public class HapiFhirMySQLDialect extends MySQLDialect {

public HapiFhirMySQLDialect(DatabaseVersion theDatabaseVersion) {
super(theDatabaseVersion);
public HapiFhirMySQLDialect() {
super();
}

public HapiFhirMySQLDialect() {
super(DatabaseVersion.make(5, 7));
public HapiFhirMySQLDialect(DialectResolutionInfo info) {
super(info);
}

/**

@@ -19,8 +19,8 @@
*/
package ca.uhn.fhir.jpa.model.dialect;

import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;

/**
* Dialect for Oracle database.

@@ -28,12 +28,12 @@ import org.hibernate.dialect.OracleDialect;
*/
public class HapiFhirOracleDialect extends OracleDialect {

public HapiFhirOracleDialect(DatabaseVersion theDatabaseVersion) {
super(theDatabaseVersion);
public HapiFhirOracleDialect() {
super();
}

public HapiFhirOracleDialect() {
super(DatabaseVersion.make(12, 2));
public HapiFhirOracleDialect(DialectResolutionInfo info) {
super(info);
}

/**

@@ -19,17 +19,17 @@
*/
package ca.uhn.fhir.jpa.model.dialect;

import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.PostgreSQLDialect;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;

public class HapiFhirPostgresDialect extends PostgreSQLDialect {

public HapiFhirPostgresDialect(DatabaseVersion theDatabaseVersion) {
super(theDatabaseVersion);
public HapiFhirPostgresDialect() {
super();
}

public HapiFhirPostgresDialect() {
super(DatabaseVersion.make(10, 0, 0));
public HapiFhirPostgresDialect(DialectResolutionInfo info) {
super(info);
}

/**

@@ -19,8 +19,8 @@
*/
package ca.uhn.fhir.jpa.model.dialect;

import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.SQLServerDialect;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;

/**
* Dialect for MS SQL Server database.

@@ -28,12 +28,12 @@ import org.hibernate.dialect.SQLServerDialect;
*/
public class HapiFhirSQLServerDialect extends SQLServerDialect {

public HapiFhirSQLServerDialect(DatabaseVersion theDatabaseVersion) {
super(theDatabaseVersion);
public HapiFhirSQLServerDialect() {
super();
}

public HapiFhirSQLServerDialect() {
super(DatabaseVersion.make(11));
public HapiFhirSQLServerDialect(DialectResolutionInfo info) {
super(info);
}

/**
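The dialect constructors above switch from hard-coded DatabaseVersion.make(..) values to the Hibernate-recommended DialectResolutionInfo constructor, so the database version comes from the live connection. The dialects themselves are still selected through ordinary configuration; a sketch follows (the hibernate.dialect property is a standard Hibernate/JPA setting, the surrounding bootstrap code is illustrative only):

import java.util.HashMap;
import java.util.Map;

public class DialectConfigExample {
    public static Map<String, Object> jpaProperties() {
        Map<String, Object> props = new HashMap<>();
        // Standard Hibernate setting; the class name matches the diff above
        props.put("hibernate.dialect", "ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect");
        return props;
    }
}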
@@ -5,7 +5,7 @@
  <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
    <version>7.1.7-SNAPSHOT</version>
    <version>7.3.0-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
  </parent>
@@ -41,6 +41,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

@@ -50,7 +51,7 @@ import java.util.Date;
import java.util.Optional;

@Transactional
public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
public class DatabaseBinaryContentStorageSvcImpl extends BaseBinaryStorageSvcImpl {

@PersistenceContext(type = PersistenceContextType.TRANSACTION)
private EntityManager myEntityManager;

@@ -61,9 +62,9 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
@Nonnull
@Override
@Transactional(propagation = Propagation.REQUIRED)
public StoredDetails storeBlob(
public StoredDetails storeBinaryContent(
IIdType theResourceId,
String theBlobIdOrNull,
String theBinaryContentIdOrNull,
String theContentType,
InputStream theInputStream,
RequestDetails theRequestDetails)

@@ -82,14 +83,20 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {

BinaryStorageEntity entity = new BinaryStorageEntity();
entity.setResourceId(theResourceId.toUnqualifiedVersionless().getValue());
entity.setBlobContentType(theContentType);
entity.setContentType(theContentType);
entity.setPublished(publishedDate);

Session session = (Session) myEntityManager.getDelegate();
LobHelper lobHelper = session.getLobHelper();

byte[] loadedStream = IOUtils.toByteArray(countingInputStream);
String id = super.provideIdForNewBlob(theBlobIdOrNull, loadedStream, theRequestDetails, theContentType);
entity.setBlobId(id);
String id = super.provideIdForNewBinaryContent(
theBinaryContentIdOrNull, loadedStream, theRequestDetails, theContentType);

entity.setContentId(id);
entity.setStorageContentBin(loadedStream);

// TODO: remove writing Blob in a future release
Blob dataBlob = lobHelper.createBlob(loadedStream);
entity.setBlob(dataBlob);

@@ -103,7 +110,7 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
myEntityManager.persist(entity);

return new StoredDetails()
.setBlobId(id)
.setBinaryContentId(id)
.setBytes(bytes)
.setPublished(publishedDate)
.setHash(hash)

@@ -111,68 +118,98 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
}

@Override
public StoredDetails fetchBlobDetails(IIdType theResourceId, String theBlobId) {
public StoredDetails fetchBinaryContentDetails(IIdType theResourceId, String theBinaryContentId) {

Optional<BinaryStorageEntity> entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId(
theBlobId, theResourceId.toUnqualifiedVersionless().getValue());
theBinaryContentId, theResourceId.toUnqualifiedVersionless().getValue());
if (entityOpt.isEmpty()) {
return null;
}

BinaryStorageEntity entity = entityOpt.get();
return new StoredDetails()
.setBlobId(theBlobId)
.setContentType(entity.getBlobContentType())
.setBinaryContentId(theBinaryContentId)
.setContentType(entity.getContentType())
.setHash(entity.getHash())
.setPublished(entity.getPublished())
.setBytes(entity.getSize());
}

@Override
public boolean writeBlob(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) throws IOException {
public boolean writeBinaryContent(IIdType theResourceId, String theBinaryContentId, OutputStream theOutputStream)
throws IOException {
Optional<BinaryStorageEntity> entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId(
theBlobId, theResourceId.toUnqualifiedVersionless().getValue());
theBinaryContentId, theResourceId.toUnqualifiedVersionless().getValue());
if (entityOpt.isEmpty()) {
return false;
}

copyBlobToOutputStream(theOutputStream, entityOpt.get());
copyBinaryContentToOutputStream(theOutputStream, entityOpt.get());

return true;
}

@Override
public void expungeBlob(IIdType theResourceId, String theBlobId) {
public void expungeBinaryContent(IIdType theResourceId, String theBinaryContentId) {
Optional<BinaryStorageEntity> entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId(
theBlobId, theResourceId.toUnqualifiedVersionless().getValue());
theBinaryContentId, theResourceId.toUnqualifiedVersionless().getValue());
entityOpt.ifPresent(
theBinaryStorageEntity -> myBinaryStorageEntityDao.deleteByPid(theBinaryStorageEntity.getBlobId()));
theBinaryStorageEntity -> myBinaryStorageEntityDao.deleteByPid(theBinaryStorageEntity.getContentId()));
}

@Override
public byte[] fetchBlob(IIdType theResourceId, String theBlobId) throws IOException {
public byte[] fetchBinaryContent(IIdType theResourceId, String theBinaryContentId) throws IOException {
BinaryStorageEntity entityOpt = myBinaryStorageEntityDao
.findByIdAndResourceId(
theBlobId, theResourceId.toUnqualifiedVersionless().getValue())
theBinaryContentId,
theResourceId.toUnqualifiedVersionless().getValue())
.orElseThrow(() -> new ResourceNotFoundException(
"Unknown blob ID: " + theBlobId + " for resource ID " + theResourceId));
"Unknown BinaryContent ID: " + theBinaryContentId + " for resource ID " + theResourceId));

return copyBlobToByteArray(entityOpt);
return copyBinaryContentToByteArray(entityOpt);
}

void copyBlobToOutputStream(OutputStream theOutputStream, BinaryStorageEntity theEntity) throws IOException {
try (InputStream inputStream = theEntity.getBlob().getBinaryStream()) {
void copyBinaryContentToOutputStream(OutputStream theOutputStream, BinaryStorageEntity theEntity)
throws IOException {

try (InputStream inputStream = getBinaryContent(theEntity)) {
IOUtils.copy(inputStream, theOutputStream);
} catch (SQLException e) {
throw new IOException(Msg.code(1341) + e);
}
}

byte[] copyBlobToByteArray(BinaryStorageEntity theEntity) throws IOException {
try {
return ByteStreams.toByteArray(theEntity.getBlob().getBinaryStream());
byte[] copyBinaryContentToByteArray(BinaryStorageEntity theEntity) throws IOException {
byte[] retVal;

try (InputStream inputStream = getBinaryContent(theEntity)) {
retVal = ByteStreams.toByteArray(inputStream);
} catch (SQLException e) {
throw new IOException(Msg.code(1342) + e);
}

return retVal;
}

/**
*
* The caller is responsible for closing the returned stream.
*
* @param theEntity
* @return
* @throws SQLException
*/
private InputStream getBinaryContent(BinaryStorageEntity theEntity) throws SQLException {
InputStream retVal;

if (theEntity.hasStorageContent()) {
retVal = new ByteArrayInputStream(theEntity.getStorageContentBin());
} else if (theEntity.hasBlob()) {
retVal = theEntity.getBlob().getBinaryStream();
} else {
retVal = new ByteArrayInputStream(new byte[0]);
}

return retVal;
}
}
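A sketch of how the renamed binary-content API is called (method names mirror the signatures in the diff above; StoredDetails#getBinaryContentId() is assumed to pair with the setBinaryContentId(..) call shown in storeBinaryContent, and the resource ID and payload are made up):

import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.IdType;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public class BinaryContentRoundTripExample {

    static byte[] roundTrip(DatabaseBinaryContentStorageSvcImpl theSvc, RequestDetails theRequest) throws IOException {
        IIdType resourceId = new IdType("Binary/example");
        byte[] payload = "hello".getBytes(StandardCharsets.UTF_8);
        try (InputStream in = new ByteArrayInputStream(payload)) {
            // Store with no caller-supplied content ID, then read the bytes back
            var details = theSvc.storeBinaryContent(resourceId, null, "text/plain", in, theRequest);
            return theSvc.fetchBinaryContent(resourceId, details.getBinaryContentId());
        }
    }
}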
@@ -208,8 +208,7 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
if (idChunk.size() >= 2) {
List<ResourceTable> entityChunk = prefetchResourceTableHistoryAndProvenance(idChunk);

// fixme mb
if (thePreFetchIndexes && false) {
if (thePreFetchIndexes) {

prefetchByField("string", "myParamsString", ResourceTable::isParamsStringPopulated, entityChunk);
prefetchByField("token", "myParamsToken", ResourceTable::isParamsTokenPopulated, entityChunk);

@@ -240,10 +239,8 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
Query query = myEntityManager.createQuery("select r, h "
+ " FROM ResourceTable r "
+ " LEFT JOIN fetch ResourceHistoryTable h "
// fixme make null safe
+ " on r.myVersion = h.myResourceVersion and r.id = h.myResourceId and r.myPartitionIdValue = h.myPartitionIdValue "
+ " left join fetch ResourceHistoryProvenanceEntity p "
+ " on p.myResourceHistoryTable = h and p.myPartitionIdValue = h.myPartitionIdValue "
+ " on r.myVersion = h.myResourceVersion and r.id = h.myResourceId "
+ " left join fetch h.myProvenance "
+ " WHERE r.myId IN ( :IDS ) ");
query.setParameter("IDS", idChunk);
@@ -58,6 +58,7 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.util.IMetaTagSorter;
import ca.uhn.fhir.util.MetaUtil;
import jakarta.annotation.Nullable;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseCoding;

@@ -470,7 +471,7 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
res.getMeta().setLastUpdated(theEntity.getUpdatedDate());
IDao.RESOURCE_PID.put(res, theEntity.getResourceId());

if (theTagList != null) {
if (CollectionUtils.isNotEmpty(theTagList)) {
res.getMeta().getTag().clear();
res.getMeta().getProfile().clear();
res.getMeta().getSecurity().clear();
@@ -29,11 +29,11 @@ import java.util.Optional;

public interface IBinaryStorageEntityDao extends JpaRepository<BinaryStorageEntity, String>, IHapiFhirJpaRepository {

@Query("SELECT e FROM BinaryStorageEntity e WHERE e.myBlobId = :blob_id AND e.myResourceId = :resource_id")
@Query("SELECT e FROM BinaryStorageEntity e WHERE e.myContentId = :content_id AND e.myResourceId = :resource_id")
Optional<BinaryStorageEntity> findByIdAndResourceId(
@Param("blob_id") String theBlobId, @Param("resource_id") String theResourceId);
@Param("content_id") String theContentId, @Param("resource_id") String theResourceId);

@Modifying
@Query("DELETE FROM BinaryStorageEntity t WHERE t.myBlobId = :pid")
@Query("DELETE FROM BinaryStorageEntity t WHERE t.myContentId = :pid")
void deleteByPid(@Param("pid") String theId);
}
@@ -43,12 +43,14 @@ import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import jakarta.persistence.Temporal;
import jakarta.persistence.TemporalType;
import jakarta.persistence.Transient;
import jakarta.persistence.UniqueConstraint;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.Length;
import org.hibernate.search.engine.backend.types.Projectable;
import org.hibernate.search.engine.backend.types.Searchable;
import org.hibernate.search.mapper.pojo.bridge.mapping.annotation.PropertyBinderRef;

@@ -56,7 +58,10 @@ import org.hibernate.search.mapper.pojo.bridge.mapping.annotation.RoutingBinderR
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.IndexingDependency;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.ObjectPath;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyBinding;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyValue;
import org.hl7.fhir.r4.model.Coding;

import java.io.Serializable;

@@ -68,6 +73,8 @@ import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import static java.util.Objects.isNull;
import static java.util.Objects.nonNull;
import static org.apache.commons.lang3.StringUtils.left;
import static org.apache.commons.lang3.StringUtils.length;

@@ -165,15 +172,14 @@ public class TermConcept implements Serializable {
@Column(name = "INDEX_STATUS", nullable = true)
private Long myIndexStatus;

@Deprecated(since = "7.2.0")
@Lob
@Column(name = "PARENT_PIDS", nullable = true)
@FullTextField(
name = "myParentPids",
searchable = Searchable.YES,
projectable = Projectable.YES,
analyzer = "conceptParentPidsAnalyzer")
private String myParentPids;

@Column(name = "PARENT_PIDS_VC", nullable = true, length = Length.LONG32)
private String myParentPidsVc;

@OneToMany(
cascade = {},
fetch = FetchType.LAZY,

@@ -356,8 +362,15 @@ public class TermConcept implements Serializable {
return this;
}

@Transient
@FullTextField(
name = "myParentPids",
searchable = Searchable.YES,
projectable = Projectable.YES,
analyzer = "conceptParentPidsAnalyzer")
@IndexingDependency(derivedFrom = @ObjectPath({@PropertyValue(propertyName = "myParentPidsVc")}))
public String getParentPidsAsString() {
return myParentPids;
return nonNull(myParentPidsVc) ? myParentPidsVc : myParentPids;
}

public List<TermConceptParentChildLink> getParents() {

@@ -437,7 +450,7 @@ public class TermConcept implements Serializable {
@PreUpdate
@PrePersist
public void prePersist() {
if (myParentPids == null) {
if (isNull(myParentPids) && isNull(myParentPidsVc)) {
Set<Long> parentPids = new HashSet<>();
TermConcept entity = this;
parentPids(entity, parentPids);

@@ -464,6 +477,7 @@ public class TermConcept implements Serializable {
}

public TermConcept setParentPids(String theParentPids) {
myParentPidsVc = theParentPids;
myParentPids = theParentPids;
return this;
}
@@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.entity;

import ca.uhn.fhir.util.ValidateUtil;
import com.google.common.annotations.VisibleForTesting;
import jakarta.annotation.Nonnull;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;

@@ -41,6 +42,7 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.Length;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.search.engine.backend.types.Projectable;
import org.hibernate.search.engine.backend.types.Searchable;

@@ -68,7 +70,7 @@ import static org.apache.commons.lang3.StringUtils.length;
public class TermConceptProperty implements Serializable {
public static final int MAX_PROPTYPE_ENUM_LENGTH = 6;
private static final long serialVersionUID = 1L;
private static final int MAX_LENGTH = 500;
public static final int MAX_LENGTH = 500;

@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(

@@ -106,10 +108,14 @@ public class TermConceptProperty implements Serializable {
@GenericField(name = "myValueString", searchable = Searchable.YES)
private String myValue;

@Deprecated(since = "7.2.0")
@Column(name = "PROP_VAL_LOB")
@Lob()
private byte[] myValueLob;

@Column(name = "PROP_VAL_BIN", nullable = true, length = Length.LONG32)
private byte[] myValueBin;

@Enumerated(EnumType.ORDINAL)
@Column(name = "PROP_TYPE", nullable = false, length = MAX_PROPTYPE_ENUM_LENGTH)
@JdbcTypeCode(SqlTypes.INTEGER)

@@ -196,8 +202,8 @@ public class TermConceptProperty implements Serializable {
* property, and the code for a {@link TermConceptPropertyTypeEnum#CODING coding} property.
*/
public String getValue() {
if (hasValueLob()) {
return getValueLobAsString();
if (hasValueBin()) {
return getValueBinAsString();
}
return myValue;
}

@@ -208,36 +214,41 @@ public class TermConceptProperty implements Serializable {
*/
public TermConceptProperty setValue(String theValue) {
if (theValue.length() > MAX_LENGTH) {
setValueLob(theValue);
setValueBin(theValue);
} else {
myValueLob = null;
myValueBin = null;
}
myValue = left(theValue, MAX_LENGTH);
return this;
}

public boolean hasValueLob() {
public boolean hasValueBin() {
if (myValueBin != null && myValueBin.length > 0) {
return true;
}

if (myValueLob != null && myValueLob.length > 0) {
return true;
}
return false;
}

public byte[] getValueLob() {
return myValueLob;
}

public TermConceptProperty setValueLob(byte[] theValueLob) {
myValueLob = theValueLob;
public TermConceptProperty setValueBin(byte[] theValueBin) {
myValueBin = theValueBin;
myValueLob = theValueBin;
return this;
}

public TermConceptProperty setValueLob(String theValueLob) {
myValueLob = theValueLob.getBytes(StandardCharsets.UTF_8);
return this;
public TermConceptProperty setValueBin(String theValueBin) {
return setValueBin(theValueBin.getBytes(StandardCharsets.UTF_8));
}

public String getValueLobAsString() {
public String getValueBinAsString() {
if (myValueBin != null && myValueBin.length > 0) {
return new String(myValueBin, StandardCharsets.UTF_8);
}

return new String(myValueLob, StandardCharsets.UTF_8);
}

@@ -295,4 +306,24 @@ public class TermConceptProperty implements Serializable {
public Long getPid() {
return myId;
}

@VisibleForTesting
public byte[] getValueBlobForTesting() {
return myValueLob;
}

@VisibleForTesting
public void setValueBlobForTesting(byte[] theValueLob) {
myValueLob = theValueLob;
}

@VisibleForTesting
public byte[] getValueBinForTesting() {
return myValueBin;
}

@VisibleForTesting
public void setValueBinForTesting(byte[] theValuebin) {
myValueBin = theValuebin;
}
}
@ -40,11 +40,13 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
|
|||
import org.apache.commons.lang3.builder.HashCodeBuilder;
|
||||
import org.apache.commons.lang3.builder.ToStringBuilder;
|
||||
import org.apache.commons.lang3.builder.ToStringStyle;
|
||||
import org.hibernate.Length;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotEmpty;
|
||||
import static org.apache.commons.lang3.StringUtils.left;
|
||||
import static org.apache.commons.lang3.StringUtils.length;
|
||||
|
||||
|
@ -98,10 +100,14 @@ public class TermValueSetConcept implements Serializable {
|
|||
@Column(name = "SOURCE_PID", nullable = true)
|
||||
private Long mySourceConceptPid;
|
||||
|
||||
@Deprecated(since = "7.2.0")
|
||||
@Lob
|
||||
@Column(name = "SOURCE_DIRECT_PARENT_PIDS", nullable = true)
|
||||
private String mySourceConceptDirectParentPids;
|
||||
|
||||
@Column(name = "SOURCE_DIRECT_PARENT_PIDS_VC", nullable = true, length = Length.LONG32)
|
||||
private String mySourceConceptDirectParentPidsVc;
|
||||
|
||||
@Column(name = "SYSTEM_URL", nullable = false, length = TermCodeSystem.MAX_URL_LENGTH)
|
||||
private String mySystem;
|
||||
|
||||
|
@ -264,7 +270,7 @@ public class TermValueSetConcept implements Serializable {
|
|||
.append("valueSetName", this.getValueSetName())
|
||||
.append("display", myDisplay)
|
||||
.append("designationCount", myDesignations != null ? myDesignations.size() : "(null)")
|
||||
.append("parentPids", mySourceConceptDirectParentPids)
|
||||
.append("parentPids", getSourceConceptDirectParentPids())
|
||||
.toString();
|
||||
}
|
||||
|
||||
|
@ -282,5 +288,12 @@ public class TermValueSetConcept implements Serializable {
|
|||
|
||||
public void setSourceConceptDirectParentPids(String theSourceConceptDirectParentPids) {
|
||||
mySourceConceptDirectParentPids = theSourceConceptDirectParentPids;
|
||||
mySourceConceptDirectParentPidsVc = theSourceConceptDirectParentPids;
|
||||
}
|
||||
|
||||
public String getSourceConceptDirectParentPids() {
|
||||
return isNotEmpty(mySourceConceptDirectParentPidsVc)
|
||||
? mySourceConceptDirectParentPidsVc
|
||||
: mySourceConceptDirectParentPids;
|
||||
}
|
||||
}
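
The same transition applies to the CLOB-backed parent-PID string: the setter writes both the deprecated SOURCE_DIRECT_PARENT_PIDS column and the new *_VC column, and the getter prefers the VC value via isNotEmpty(). A small sketch of that accessor shape, assuming plain String fields and no JPA wiring (names are illustrative):

// Illustrative sketch only - not part of this commit.
class DualStringColumn {
	private String myLegacyClob; // deprecated @Lob-backed column
	private String myVc; // new varchar/text column

	void setParentPids(String thePids) {
		myLegacyClob = thePids;
		myVc = thePids;
	}

	String getParentPids() {
		// mirrors the isNotEmpty(...) ? vc : legacy preference shown above
		return (myVc != null && !myVc.isEmpty()) ? myVc : myLegacyClob;
	}

	public static void main(String[] args) {
		DualStringColumn c = new DualStringColumn();
		c.setParentPids("123 456");
		System.out.println(c.getParentPids()); // "123 456"
	}
}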

@ -127,7 +127,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {

		Builder version = forVersion(VersionEnum.V7_2_0);

		// allow null codes in concept map targets
		// allow null codes in concept map targets (see comment on "20190722.27" if you are going to change this)
		version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
				.modifyColumn("20240327.1", "TARGET_CODE")
				.nullable()

@ -139,6 +139,62 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
		forcedId.dropIndex("20240402.2", "IDX_FORCEDID_RESID");
		forcedId.dropIndex("20240402.3", "IDX_FORCEDID_TYPE_FID");
		forcedId.dropIndex("20240402.4", "IDX_FORCEID_FID");

		// Migration from LOB
		{
			Builder.BuilderWithTableName binaryStorageBlobTable = version.onTable("HFJ_BINARY_STORAGE_BLOB");

			binaryStorageBlobTable
					.renameColumn("20240404.1", "BLOB_ID", "CONTENT_ID")
					.renameColumn("20240404.2", "BLOB_SIZE", "CONTENT_SIZE")
					.renameColumn("20240404.3", "BLOB_HASH", "CONTENT_HASH");

			binaryStorageBlobTable
					.modifyColumn("20240404.4", "BLOB_DATA")
					.nullable()
					.withType(ColumnTypeEnum.BLOB);

			binaryStorageBlobTable
					.addColumn("20240404.5", "STORAGE_CONTENT_BIN")
					.nullable()
					.type(ColumnTypeEnum.BINARY);

			binaryStorageBlobTable.migrateBlobToBinary("20240404.6", "BLOB_DATA", "STORAGE_CONTENT_BIN");

			binaryStorageBlobTable.renameTable("20240404.7", "HFJ_BINARY_STORAGE");
		}

		{
			Builder.BuilderWithTableName termConceptPropertyTable = version.onTable("TRM_CONCEPT_PROPERTY");

			termConceptPropertyTable
					.addColumn("20240409.1", "PROP_VAL_BIN")
					.nullable()
					.type(ColumnTypeEnum.BINARY);

			termConceptPropertyTable.migrateBlobToBinary("20240409.2", "PROP_VAL_LOB", "PROP_VAL_BIN");
		}

		{
			Builder.BuilderWithTableName termValueSetConceptTable = version.onTable("TRM_VALUESET_CONCEPT");
			termValueSetConceptTable
					.addColumn("20240409.3", "SOURCE_DIRECT_PARENT_PIDS_VC")
					.nullable()
					.type(ColumnTypeEnum.TEXT);

			termValueSetConceptTable.migrateClobToText(
					"20240409.4", "SOURCE_DIRECT_PARENT_PIDS", "SOURCE_DIRECT_PARENT_PIDS_VC");
		}

		{
			Builder.BuilderWithTableName termConceptTable = version.onTable("TRM_CONCEPT");
			termConceptTable
					.addColumn("20240410.1", "PARENT_PIDS_VC")
					.nullable()
					.type(ColumnTypeEnum.TEXT);

			termConceptTable.migrateClobToText("20240410.2", "PARENT_PIDS", "PARENT_PIDS_VC");
		}
	}
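
Each of the migration blocks above follows the same three-step shape in the builder DSL: add a nullable column of the new type, copy the data with migrateBlobToBinary(...) or migrateClobToText(...), and rename where needed. A hedged sketch of an analogous step for a hypothetical table and column pair, reusing only the builder calls that appear above (this is a fragment assuming the surrounding HapiFhirJpaMigrationTasks context, and the table, column names, and task IDs are made up):

	// Illustrative fragment only - hypothetical table/column, made-up task IDs.
	private void migrateExampleLobColumn(Builder version) {
		Builder.BuilderWithTableName exampleTable = version.onTable("EXAMPLE_TABLE");

		exampleTable
				.addColumn("20240501.1", "PAYLOAD_BIN") // new nullable binary column
				.nullable()
				.type(ColumnTypeEnum.BINARY);

		// copy existing LOB rows into the new column
		exampleTable.migrateBlobToBinary("20240501.2", "PAYLOAD_LOB", "PAYLOAD_BIN");
	}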

	protected void init700() {

@ -2489,10 +2545,26 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
				.modifyColumn("20190722.26", "SYSTEM_VERSION")
				.nullable()
				.withType(ColumnTypeEnum.STRING, 200);

		/*
		DISABLED THIS STEP (20190722.27) ON PURPOSE BECAUSE IT STARTED CAUSING FAILURES ON MSSQL FOR A FRESH DB.
		I left it here for historical purposes.
		The reason for the failure is as follows. The TARGET_CODE column was originally 'not nullable' when it was
		first introduced. And in 7_2_0, it is being changed to a nullable column (see 20240327.1 in init720()).
		Starting with 7_2_0, on a fresh db, we create the table with nullable TARGET_CODE (as it is made nullable now).
		Since we run all migration steps on a fresh db, this step would try to convert the column, which is created as
		nullable, to not nullable (and it would then need to be converted back to nullable in the 7_2_0 migration).
		Changing a nullable column to not nullable is not allowed in
		MSSQL if there is an index on the column, which is the case here, as there is IDX_CNCPT_MP_GRP_ELM_TGT_CD
		on this column. Since init720() has the right migration
		step, where the column is set to nullable and has the right type and length, this statement is also
		not necessary anymore, even for non-fresh dbs.

		version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
				.modifyColumn("20190722.27", "TARGET_CODE")
				.nonNullable()
				.withType(ColumnTypeEnum.STRING, 500);
		*/
		version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
				.modifyColumn("20190722.28", "VALUESET_URL")
				.nullable()

@ -243,7 +243,7 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
	 */
	private byte[] fetchBlobFromBinary(IBaseBinary theBinary) throws IOException {
		if (myBinaryStorageSvc != null && !(myBinaryStorageSvc instanceof NullBinaryStorageSvcImpl)) {
			return myBinaryStorageSvc.fetchDataBlobFromBinary(theBinary);
			return myBinaryStorageSvc.fetchDataByteArrayFromBinary(theBinary);
		} else {
			byte[] value = BinaryUtil.getOrCreateData(myCtx, theBinary).getValue();
			if (value == null) {
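
The hunk above switches the package cache to the renamed fetchDataByteArrayFromBinary(...) accessor while keeping the same two-branch shape: use the externalized binary-storage service when one is configured, otherwise read the data element carried inline on the Binary resource. A rough sketch of that decision, with a made-up interface standing in for the real storage service:

// Illustrative sketch only - BinaryContentSource is a hypothetical stand-in for the
// real binary storage service; the fallback mirrors the branch structure above.
import java.io.IOException;

interface BinaryContentSource {
	byte[] fetchExternalized() throws IOException;
}

class BinaryFetchSketch {
	static byte[] fetch(BinaryContentSource theExternalStorage, byte[] theInlineData) throws IOException {
		if (theExternalStorage != null) {
			// content was externalized; ask the storage service for the bytes
			return theExternalStorage.fetchExternalized();
		}
		// no storage service configured; fall back to the inline data element
		return theInlineData;
	}

	public static void main(String[] args) throws IOException {
		System.out.println(fetch(null, new byte[] {1, 2, 3}).length); // prints 3
	}
}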

@ -0,0 +1,81 @@
package ca.uhn.fhir.jpa.entity;

import com.google.common.base.Strings;
import org.junit.jupiter.api.Test;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;

public class TermConceptPropertyTest {

	private static final String ourVeryLongString = Strings.repeat("a", TermConceptProperty.MAX_LENGTH+1);

	@Test
	public void testSetValue_whenValueExceedsMAX_LENGTH_willWriteToBlobAndBin(){
		// given
		TermConceptProperty termConceptProperty = new TermConceptProperty();

		// when
		termConceptProperty.setValue(ourVeryLongString);

		// then
		assertThat(termConceptProperty.getValueBlobForTesting(), notNullValue());
		assertThat(termConceptProperty.getValueBinForTesting(), notNullValue());
	}

	@Test
	public void testHasValueBin_willDefaultToAssertingValueBin(){
		// given
		TermConceptProperty termConceptProperty = new TermConceptProperty();
		termConceptProperty.setValueBinForTesting(ourVeryLongString.getBytes());
		termConceptProperty.setValueBlobForTesting(null);

		// when/then
		assertThat(termConceptProperty.hasValueBin(), is(true));
	}

	@Test
	public void testHasValueBin_willAssertValueBlob_whenValueBinNotPresent(){
		// given
		TermConceptProperty termConceptProperty = new TermConceptProperty();
		termConceptProperty.setValueBinForTesting(null);
		termConceptProperty.setValueBlobForTesting(ourVeryLongString.getBytes());

		// when/then
		assertThat(termConceptProperty.hasValueBin(), is(true));
	}

	@Test
	public void testGetValue_whenValueExceedsMAX_LENGTH_willGetValueBinByDefault(){
		// given
		TermConceptProperty termConceptProperty = new TermConceptProperty();
		termConceptProperty.setValueBinForTesting(ourVeryLongString.getBytes());
		termConceptProperty.setValueBlobForTesting(null);

		// when
		String value = termConceptProperty.getValue();

		// then
		assertThat(value, startsWith("a"));
	}

	@Test
	public void testGetValue_whenOnlyValueBlobIsSet_willGetValueValueBlob(){
		// given
		TermConceptProperty termConceptProperty = new TermConceptProperty();
		termConceptProperty.setValueBinForTesting(null);
		termConceptProperty.setValueBlobForTesting(ourVeryLongString.getBytes());

		// when
		String value = termConceptProperty.getValue();

		// then
		assertThat(value, startsWith("a"));
	}

}

@ -6,7 +6,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.1.7-SNAPSHOT</version>
		<version>7.3.0-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>

@ -3,7 +3,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.1.7-SNAPSHOT</version>
		<version>7.3.0-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>

@ -3,7 +3,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.1.7-SNAPSHOT</version>
		<version>7.3.0-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>

@ -6,7 +6,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.1.7-SNAPSHOT</version>
		<version>7.3.0-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>

@ -5,7 +5,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.1.7-SNAPSHOT</version>
		<version>7.3.0-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>

@ -26,37 +26,44 @@ import jakarta.persistence.Lob;
import jakarta.persistence.Table;
import jakarta.persistence.Temporal;
import jakarta.persistence.TemporalType;
import org.hibernate.Length;

import java.sql.Blob;
import java.util.Date;

import static java.util.Objects.nonNull;

@Entity
@Table(name = "HFJ_BINARY_STORAGE_BLOB")
@Table(name = "HFJ_BINARY_STORAGE")
public class BinaryStorageEntity {

	@Id
	@Column(name = "BLOB_ID", length = 200, nullable = false)
	// N.B GGG: Note that the `blob id` is the same as the `externalized binary id`.
	private String myBlobId;
	@Column(name = "CONTENT_ID", length = 200, nullable = false)
	// N.B GGG: Note that the `content id` is the same as the `externalized binary id`.
	private String myContentId;

	@Column(name = "RESOURCE_ID", length = 100, nullable = false)
	private String myResourceId;

	@Column(name = "BLOB_SIZE", nullable = true)
	@Column(name = "CONTENT_SIZE", nullable = true)
	private long mySize;

	@Column(name = "CONTENT_TYPE", nullable = false, length = 100)
	private String myBlobContentType;
	private String myContentType;

	@Lob
	@Column(name = "BLOB_DATA", nullable = false, insertable = true, updatable = false)
	@Deprecated(since = "7.2.0")
	@Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later
	@Column(name = "BLOB_DATA", nullable = true, insertable = true, updatable = false)
	private Blob myBlob;

	@Column(name = "STORAGE_CONTENT_BIN", nullable = true, length = Length.LONG32)
	private byte[] myStorageContentBin;

	@Temporal(TemporalType.TIMESTAMP)
	@Column(name = "PUBLISHED_DATE", nullable = false)
	private Date myPublished;

	@Column(name = "BLOB_HASH", length = 128, nullable = true)
	@Column(name = "CONTENT_HASH", length = 128, nullable = true)
	private String myHash;

	public Date getPublished() {

@ -71,8 +78,8 @@ public class BinaryStorageEntity {
		return myHash;
	}

	public void setBlobId(String theBlobId) {
		myBlobId = theBlobId;
	public void setContentId(String theContentId) {
		myContentId = theContentId;
	}

	public void setResourceId(String theResourceId) {

@ -83,12 +90,12 @@ public class BinaryStorageEntity {
		return mySize;
	}

	public String getBlobContentType() {
		return myBlobContentType;
	public String getContentType() {
		return myContentType;
	}

	public void setBlobContentType(String theBlobContentType) {
		myBlobContentType = theBlobContentType;
	public void setContentType(String theContentType) {
		myContentType = theContentType;
	}

	public Blob getBlob() {

@ -99,8 +106,8 @@ public class BinaryStorageEntity {
		myBlob = theBlob;
	}

	public String getBlobId() {
		return myBlobId;
	public String getContentId() {
		return myContentId;
	}

	public void setSize(long theSize) {

@ -110,4 +117,21 @@ public class BinaryStorageEntity {
	public void setHash(String theHash) {
		myHash = theHash;
	}

	public byte[] getStorageContentBin() {
		return myStorageContentBin;
	}

	public BinaryStorageEntity setStorageContentBin(byte[] theStorageContentBin) {
		myStorageContentBin = theStorageContentBin;
		return this;
	}

	public boolean hasStorageContent() {
		return nonNull(myStorageContentBin);
	}

	public boolean hasBlob() {
		return nonNull(myBlob);
	}
}
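
With both the legacy BLOB_DATA column and the new STORAGE_CONTENT_BIN column on the entity, a reader can use hasStorageContent()/hasBlob() to decide where the bytes actually live. A short sketch of that preference as a standalone helper, assuming simplified error handling (this helper is hypothetical, not part of the commit):

// Illustrative sketch only - prefer the new binary column, fall back to the legacy Blob.
import java.sql.Blob;
import java.sql.SQLException;

class BinaryContentReader {
	static byte[] readContent(byte[] theStorageContentBin, Blob theLegacyBlob) throws SQLException {
		if (theStorageContentBin != null) {
			// new column populated (hasStorageContent() == true)
			return theStorageContentBin;
		}
		if (theLegacyBlob != null) {
			// legacy rows written before the 7.2.0 migration (hasBlob() == true)
			return theLegacyBlob.getBytes(1, (int) theLegacyBlob.length());
		}
		return new byte[0];
	}
}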

@ -36,6 +36,7 @@ import jakarta.persistence.Index;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.PrePersist;
import jakarta.persistence.PreUpdate;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import org.apache.commons.lang3.StringUtils;

@ -429,6 +430,7 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
	 * We don't truncate earlier in the flow because the index hashes MUST be calculated on the full string.
	 */
	@PrePersist
	@PreUpdate
	public void truncateFieldsForDB() {
		mySystem = StringUtils.truncate(mySystem, MAX_LENGTH);
		myValue = StringUtils.truncate(myValue, MAX_LENGTH);
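
The callback truncates the stored system/value just before the row is written, after the index hashes have already been computed on the full-length strings, and the added @PreUpdate makes the same trimming apply on updates. A generic sketch of that JPA lifecycle trick on a hypothetical entity (the entity and its limit are illustrative):

// Illustrative sketch only - hypothetical entity showing the @PrePersist/@PreUpdate
// truncation pattern; anything derived from the full value is assumed to be set earlier.
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.PrePersist;
import jakarta.persistence.PreUpdate;
import org.apache.commons.lang3.StringUtils;

@Entity
public class ExampleTokenEntity {
	public static final int MAX_LENGTH = 200; // illustrative limit

	@Id
	private Long myId;

	private String myValue;

	@PrePersist
	@PreUpdate
	public void truncateFieldsForDb() {
		// only the persisted copy is shortened
		myValue = StringUtils.truncate(myValue, MAX_LENGTH);
	}
}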

@ -97,7 +97,7 @@ public class StorageSettings {
	private boolean myDefaultSearchParamsCanBeOverridden = true;
	private Set<Subscription.SubscriptionChannelType> mySupportedSubscriptionTypes = new HashSet<>();
	private boolean myAutoCreatePlaceholderReferenceTargets;
	private boolean myCrossPartitionSubscriptionEnabled = false;
	private boolean myCrossPartitionSubscriptionEnabled = true;
	private Integer myBundleBatchPoolSize = DEFAULT_BUNDLE_BATCH_POOL_SIZE;
	private Integer myBundleBatchMaxPoolSize = DEFAULT_BUNDLE_BATCH_MAX_POOL_SIZE;
	private boolean myEnableInMemorySubscriptionMatching = true;
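
The default for cross-partition subscriptions flips to enabled here; a deployment that wants the previous behaviour can still opt out explicitly. A minimal configuration sketch using only the setter and import that appear elsewhere in this diff (the wrapper class is illustrative):

// Illustrative sketch only - explicit opt-out for deployments that want the previous default.
import ca.uhn.fhir.jpa.model.entity.StorageSettings;

class CrossPartitionOptOut {
	static StorageSettings buildSettings() {
		StorageSettings settings = new StorageSettings();
		settings.setCrossPartitionSubscriptionEnabled(false); // the new default in this change is true
		return settings;
	}
}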

@ -5,7 +5,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.1.7-SNAPSHOT</version>
		<version>7.3.0-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>

@ -5,7 +5,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.1.7-SNAPSHOT</version>
		<version>7.3.0-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>

@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.subscription.model.config;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.subscription.match.matcher.matching.SubscriptionStrategyEvaluator;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionCanonicalizer;
import org.springframework.context.annotation.Bean;

@ -29,8 +30,9 @@ import org.springframework.context.annotation.Configuration;
public class SubscriptionModelConfig {

	@Bean
	public SubscriptionCanonicalizer subscriptionCanonicalizer(FhirContext theFhirContext) {
		return new SubscriptionCanonicalizer(theFhirContext);
	public SubscriptionCanonicalizer subscriptionCanonicalizer(
			FhirContext theFhirContext, StorageSettings theStorageSettings) {
		return new SubscriptionCanonicalizer(theFhirContext, theStorageSettings);
	}

	@Bean

@ -4,6 +4,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionCanonicalizer;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedJsonMessage;

@ -50,7 +51,7 @@ public class SubscriptionRegisteringSubscriberTest {
	@Mock
	private SubscriptionRegistry mySubscriptionRegistry;
	@Spy
	private SubscriptionCanonicalizer mySubscriptionCanonicalizer = new SubscriptionCanonicalizer(myFhirContext);
	private SubscriptionCanonicalizer mySubscriptionCanonicalizer = new SubscriptionCanonicalizer(myFhirContext, new StorageSettings());
	@Mock
	private DaoRegistry myDaoRegistry;
	@Mock

@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.subscription.match.registry;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.subscription.channel.subscription.ISubscriptionDeliveryChannelNamer;
import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelRegistry;
import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription;

@ -33,7 +34,7 @@ public class SubscriptionRegistryTest {
	static FhirContext ourFhirContext = FhirContext.forR4Cached();

	@Spy
	SubscriptionCanonicalizer mySubscriptionCanonicalizer = new SubscriptionCanonicalizer(ourFhirContext);
	SubscriptionCanonicalizer mySubscriptionCanonicalizer = new SubscriptionCanonicalizer(ourFhirContext, new StorageSettings());

	@Spy
	ISubscriptionDeliveryChannelNamer mySubscriptionDeliveryChannelNamer = new TestChannelNamer();

@ -1,6 +1,7 @@
package ca.uhn.fhir.jpa.subscription.module;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionCanonicalizer;
import ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryJsonMessage;

@ -8,12 +9,15 @@ import ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage;
import ca.uhn.fhir.util.HapiExtensions;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import jakarta.annotation.Nonnull;
import org.assertj.core.util.Lists;
import org.hamcrest.Matchers;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.Subscription;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@ -66,7 +70,7 @@ public class CanonicalSubscriptionTest {

	@Test
	public void testCanonicalSubscriptionRetainsMetaTags() throws IOException {
		SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4());
		SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4(), new StorageSettings());
		CanonicalSubscription sub1 = canonicalizer.canonicalize(makeMdmSubscription());
		assertTrue(sub1.getTags().keySet().contains(TAG_SYSTEM));
		assertEquals(sub1.getTags().get(TAG_SYSTEM), TAG_VALUE);

@ -74,7 +78,7 @@ public class CanonicalSubscriptionTest {

	@Test
	public void emailDetailsEquals() {
		SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4());
		SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4(), new StorageSettings());
		CanonicalSubscription sub1 = canonicalizer.canonicalize(makeEmailSubscription());
		CanonicalSubscription sub2 = canonicalizer.canonicalize(makeEmailSubscription());
		assertTrue(sub1.equals(sub2));

@ -82,7 +86,7 @@ public class CanonicalSubscriptionTest {

	@Test
	public void testSerializeMultiPartitionSubscription(){
		SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4());
		SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4(), new StorageSettings());
		Subscription subscription = makeEmailSubscription();
		subscription.addExtension(HapiExtensions.EXTENSION_SUBSCRIPTION_CROSS_PARTITION, new BooleanType().setValue(true));
		CanonicalSubscription canonicalSubscription = canonicalizer.canonicalize(subscription);

@ -90,28 +94,30 @@ public class CanonicalSubscriptionTest {
		assertEquals(canonicalSubscription.getCrossPartitionEnabled(), true);
	}

	@Test
	public void testSerializeIncorrectMultiPartitionSubscription(){
		SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4());
	@ParameterizedTest
	@ValueSource(booleans = {true, false})
	public void testSerializeIncorrectMultiPartitionSubscription(boolean theIsCrossPartitionEnabled){
		final StorageSettings storageSettings = buildStorageSettings(theIsCrossPartitionEnabled);
		SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4(), storageSettings);
		Subscription subscription = makeEmailSubscription();
		subscription.addExtension(HapiExtensions.EXTENSION_SUBSCRIPTION_CROSS_PARTITION, new StringType().setValue("false"));
		CanonicalSubscription canonicalSubscription = canonicalizer.canonicalize(subscription);

		System.out.print(canonicalSubscription);

		assertEquals(canonicalSubscription.getCrossPartitionEnabled(), false);
		assertEquals(canonicalSubscription.getCrossPartitionEnabled(), theIsCrossPartitionEnabled);
	}

	@Test
	public void testSerializeNonMultiPartitionSubscription(){
		SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4());
	@ParameterizedTest
	@ValueSource(booleans = {true, false})
	public void testSerializeNonMultiPartitionSubscription(boolean theIsCrossPartitionEnabled){
		final StorageSettings storageSettings = buildStorageSettings(theIsCrossPartitionEnabled);
		SubscriptionCanonicalizer canonicalizer = new SubscriptionCanonicalizer(FhirContext.forR4(), storageSettings);
		Subscription subscription = makeEmailSubscription();
		subscription.addExtension(HapiExtensions.EXTENSION_SUBSCRIPTION_CROSS_PARTITION, new BooleanType().setValue(false));
		CanonicalSubscription canonicalSubscription = canonicalizer.canonicalize(subscription);

		System.out.print(canonicalSubscription);

		assertEquals(canonicalSubscription.getCrossPartitionEnabled(), false);
		assertEquals(canonicalSubscription.getCrossPartitionEnabled(), theIsCrossPartitionEnabled);
	}

	@Test

@ -154,4 +160,11 @@ public class CanonicalSubscriptionTest {
		ResourceDeliveryMessage payload = resourceDeliveryMessage.getPayload();
		return payload.getSubscription();
	}

	@Nonnull
	private static StorageSettings buildStorageSettings(boolean theIsCrossPartitionEnabled) {
		final StorageSettings storageSettings = new StorageSettings();
		storageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled);
		return storageSettings;
	}
}

@ -27,6 +27,8 @@ import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.mockito.Answers;
import org.mockito.InjectMocks;
import org.mockito.Mock;

@ -38,6 +40,7 @@ import java.util.Optional;

import static ca.uhn.fhir.jpa.subscription.match.matcher.subscriber.SubscriptionCriteriaParser.TypeEnum.STARTYPE_EXPRESSION;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.atLeastOnce;

@ -227,8 +230,10 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
		ourObservationListener.awaitExpected();
	}

	@Test
	public void testSubscriptionAndResourceOnDiffPartitionNotMatch() throws InterruptedException {
	@ParameterizedTest
	@ValueSource(booleans = {true, false})
	public void testSubscriptionAndResourceOnDiffPartitionNotMatch(boolean theIsCrossPartitionEnabled) throws InterruptedException {
		myStorageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled);
		myPartitionSettings.setPartitioningEnabled(true);
		String payload = "application/fhir+json";

@ -240,13 +245,18 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
		mockSubscriptionRead(requestPartitionId, subscription);
		sendSubscription(subscription, requestPartitionId, true);

		mySubscriptionResourceNotMatched.setExpectedCount(1);
		sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionId(0));
		mySubscriptionResourceNotMatched.awaitExpected();
		final ThrowsInterrupted throwsInterrupted = () -> sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionId(0));
		if (theIsCrossPartitionEnabled) {
			runWithinLatchLogicExpectSuccess(throwsInterrupted);
		} else {
			runWithLatchLogicExpectFailure(throwsInterrupted);
		}
	}

	@Test
	public void testSubscriptionAndResourceOnDiffPartitionNotMatchPart2() throws InterruptedException {
	@ParameterizedTest
	@ValueSource(booleans = {true, false})
	public void testSubscriptionAndResourceOnDiffPartitionNotMatchPart2(boolean theIsCrossPartitionEnabled) throws InterruptedException {
		myStorageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled);
		myPartitionSettings.setPartitioningEnabled(true);
		String payload = "application/fhir+json";

@ -258,13 +268,19 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
		mockSubscriptionRead(requestPartitionId, subscription);
		sendSubscription(subscription, requestPartitionId, true);

		mySubscriptionResourceNotMatched.setExpectedCount(1);
		sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionId(1));
		mySubscriptionResourceNotMatched.awaitExpected();
		final ThrowsInterrupted throwsInterrupted = () -> sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionId(1));

		if (theIsCrossPartitionEnabled) {
			runWithinLatchLogicExpectSuccess(throwsInterrupted);
		} else {
			runWithLatchLogicExpectFailure(throwsInterrupted);
		}
	}

	@Test
	public void testSubscriptionOnDefaultPartitionAndResourceOnDiffPartitionNotMatch() throws InterruptedException {
	@ParameterizedTest
	@ValueSource(booleans = {true, false})
	public void testSubscriptionOnDefaultPartitionAndResourceOnDiffPartitionNotMatch(boolean theIsCrossPartitionEnabled) throws InterruptedException {
		myStorageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled);
		myPartitionSettings.setPartitioningEnabled(true);
		String payload = "application/fhir+json";

@ -276,13 +292,19 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
		mockSubscriptionRead(requestPartitionId, subscription);
		sendSubscription(subscription, requestPartitionId, true);

		mySubscriptionResourceNotMatched.setExpectedCount(1);
		sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionId(1));
		mySubscriptionResourceNotMatched.awaitExpected();
		final ThrowsInterrupted throwsInterrupted = () -> sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionId(1));

		if (theIsCrossPartitionEnabled) {
			runWithinLatchLogicExpectSuccess(throwsInterrupted);
		} else {
			runWithLatchLogicExpectFailure(throwsInterrupted);
		}
	}

	@Test
	public void testSubscriptionOnAPartitionAndResourceOnDefaultPartitionNotMatch() throws InterruptedException {
	@ParameterizedTest
	@ValueSource(booleans = {true, false})
	public void testSubscriptionOnAPartitionAndResourceOnDefaultPartitionNotMatch(boolean theIsCrossPartitionEnabled) throws InterruptedException {
		myStorageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled);
		myPartitionSettings.setPartitioningEnabled(true);
		String payload = "application/fhir+json";

@ -294,9 +316,13 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
		mockSubscriptionRead(requestPartitionId, subscription);
		sendSubscription(subscription, requestPartitionId, true);

		mySubscriptionResourceNotMatched.setExpectedCount(1);
		sendObservation(code, "SNOMED-CT", RequestPartitionId.defaultPartition());
		mySubscriptionResourceNotMatched.awaitExpected();
		final ThrowsInterrupted throwsInterrupted = () -> sendObservation(code, "SNOMED-CT", RequestPartitionId.defaultPartition());

		if (theIsCrossPartitionEnabled) {
			runWithinLatchLogicExpectSuccess(throwsInterrupted);
		} else {
			runWithLatchLogicExpectFailure(throwsInterrupted);
		}
	}

	@Test

@ -320,8 +346,10 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
		ourObservationListener.awaitExpected();
	}

	@Test
	public void testSubscriptionOnOnePartitionDoNotMatchResourceOnMultiplePartitions() throws InterruptedException {
	@ParameterizedTest
	@ValueSource(booleans = {true, false})
	public void testSubscriptionOnOnePartitionDoNotMatchResourceOnMultiplePartitions(boolean theIsCrossPartitionEnabled) throws InterruptedException {
		myStorageSettings.setCrossPartitionSubscriptionEnabled(theIsCrossPartitionEnabled);
		myPartitionSettings.setPartitioningEnabled(true);
		String payload = "application/fhir+json";

@ -333,10 +361,13 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
		mockSubscriptionRead(requestPartitionId, subscription);
		sendSubscription(subscription, requestPartitionId, true);

		mySubscriptionResourceNotMatched.setExpectedCount(1);
		List<Integer> partitionId = Collections.synchronizedList(Lists.newArrayList(0, 2));
		sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionIds(partitionId));
		mySubscriptionResourceNotMatched.awaitExpected();
		final ThrowsInterrupted throwsInterrupted = () -> sendObservation(code, "SNOMED-CT", RequestPartitionId.fromPartitionIds(Collections.synchronizedList(Lists.newArrayList(0, 2))));

		if (theIsCrossPartitionEnabled) {
			runWithinLatchLogicExpectSuccess(throwsInterrupted);
		} else {
			runWithLatchLogicExpectFailure(throwsInterrupted);
		}
	}

	@Test

@ -519,4 +550,31 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
			verify(message, atLeastOnce()).getPayloadId(null);
		}
	}

	private interface ThrowsInterrupted {
		void runOrThrow() throws InterruptedException;
	}

	private void runWithLatchLogicExpectFailure(ThrowsInterrupted theRunnable) {
		try {
			mySubscriptionResourceNotMatched.setExpectedCount(1);
			theRunnable.runOrThrow();
			mySubscriptionResourceNotMatched.awaitExpected();
		} catch (InterruptedException exception) {
			Thread.currentThread().interrupt();
		}
	}

	private void runWithinLatchLogicExpectSuccess(ThrowsInterrupted theRunnable) {
		try {
			ourObservationListener.setExpectedCount(1);
			mySubscriptionResourceMatched.setExpectedCount(1);
			theRunnable.runOrThrow();
			mySubscriptionResourceMatched.awaitExpected();
			ourObservationListener.awaitExpected();
		} catch (InterruptedException exception) {
			fail();
			Thread.currentThread().interrupt();
		}
	}
}
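
The helpers added at the bottom wrap the "send a resource, then wait on the matched or not-matched latch" sequence behind a small functional interface, so each parameterized test simply picks the expected outcome. A stripped-down sketch of that shape, with the test's latch infrastructure replaced by a plain Runnable (everything here is illustrative):

// Illustrative sketch only - shows the functional-interface wrapper used above.
class LatchHelperSketch {

	@FunctionalInterface
	interface ThrowsInterrupted {
		void runOrThrow() throws InterruptedException;
	}

	static void runExpectingOutcome(ThrowsInterrupted theAction, Runnable theAwaitExpected) {
		try {
			theAction.runOrThrow();
			theAwaitExpected.run(); // stands in for latch.awaitExpected()
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt();
		}
	}

	public static void main(String[] args) {
		runExpectingOutcome(() -> Thread.sleep(1), () -> System.out.println("expectation met"));
	}
}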