Merge remote-tracking branch 'origin/master' into do-20240206-core-bump-6-2-16

This commit is contained in:
dotasek.dev 2024-02-28 11:17:49 -05:00
commit 01f4604350
17 changed files with 95 additions and 11 deletions

View File

@ -115,6 +115,10 @@ public enum VersionEnum {
V6_4_0,
V6_4_1,
V6_4_2,
V6_4_3,
V6_4_4,
V6_4_5,
V6_4_6,
V6_5_0,
V6_6_0,
V6_6_1,

View File

@ -0,0 +1,2 @@
The known issue with Bulk Export in HAPI 6.4.0 has been resolved. Bulk export is now
more performant at large scale and no longer occasionally generates incomplete file reports.

View File

@ -1,3 +1,3 @@
---
release-date: "2023-03-15"
codename: "Vishwa"
release-date: "2023-03-08"
codename: "Wizard"

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 4652
backport: 6.4.5
title: "Fix for MSSQL migration failure related to Job instance UPDATE_TIME column default value not being set correctly"

View File

@ -0,0 +1,3 @@
---
release-date: "2023-03-15"
codename: "Wizard"

View File

@ -0,0 +1,10 @@
This release fixes an accidental behaviour that was introduced in 6.4.2. From that version up until now, if a Tag Definition was created with a null `userSelected` element,
it would still be stored as `false` instead of `null`. This release fixes that behaviour and now correctly stores the value as `null` when it is not specified.

If you do not use this field, no action needs to be taken. However, if you do use this field, the `userSelected` elements stored from the installation of version 2023.02.R02 up until now are potentially suspect. The following SQL can be executed to clear the `false` values from this table and replace them with `null`, if desired:
```sql
update HFJ_TAG_DEF
set TAG_USER_SELECTED = null
where TAG_USER_SELECTED = 'false'
```
Note that this will wholesale replace the stored `userSelected` values: every `false` is set to `null`, including any that were intentionally recorded as `false`.
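
To gauge the impact before running the update, the affected rows can be counted first. This is a minimal sketch using the same table and column as above; depending on your database, the boolean literal may need to be written differently (e.g. `0`/`1`):
```sql
-- Count the rows that the update above would set to null
select count(*)
from HFJ_TAG_DEF
where TAG_USER_SELECTED = 'false'
```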

View File

@ -0,0 +1,3 @@
---
release-date: "2023-03-15"
codename: "Wizard"

View File

@ -0,0 +1,3 @@
---
release-date: "2023-06-28"
codename: "Wizard"

View File

@ -2,4 +2,5 @@
type: fix
issue: 4597
jira: SMILE-5993
backport: 6.4.4
title: "Simultaneous conditional create or create-on-update operations no longer create duplicate matching resources."

View File

@ -1,4 +1,5 @@
---
type: perf
issue: 4622
backport: 6.4.3
title: "The batch system now reads less data during the maintenance pass. This avoids slowdowns on large systems."

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 4630
backport: 6.4.3
title: "Default values are provided for the new UPDATE_TIME columns so batch jobs started before an upgrade can complete."

View File

@ -1,4 +1,5 @@
---
type: fix
issue: 4813
backport: 6.4.5
title: "Under heavy concurrency, a bug resulted in identical tag definitions being rejected with a `NonUniqueResultException` some of the time. This has been corrected."

View File

@ -0,0 +1,4 @@
---
type: fix
issue: 5742
title: Fixed behaviour of the `_language` query parameter. It is now picked up as a search parameter in the resource provider and filters results accordingly. Thanks to Jens Villadsen (@jkiddo) for the contribution!
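
For context, the template change further down wires `_language` into the generated R5 resource providers. A fluent-client search using the parameter might look like the following sketch; the server URL, resource type, and language code are illustrative assumptions, not part of the commit:
```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.gclient.TokenClientParam;
import org.hl7.fhir.r5.model.Bundle;
import org.hl7.fhir.r5.model.Patient;

public class LanguageSearchExample {
    public static void main(String[] args) {
        // Hypothetical server base URL; replace with a real endpoint.
        IGenericClient client = FhirContext.forR5()
                .newRestfulGenericClient("http://localhost:8080/fhir");

        // Equivalent to GET [base]/Patient?_language=en
        Bundle results = client.search()
                .forResource(Patient.class)
                .where(new TokenClientParam(Constants.PARAM_LANGUAGE).exactly().code("en"))
                .returnBundle(Bundle.class)
                .execute();

        System.out.println("Matching resources: " + results.getTotal());
    }
}
```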

View File

@ -221,15 +221,6 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
    protected void init680() {
        Builder version = forVersion(VersionEnum.V6_8_0);
        // HAPI-FHIR #4801 - Add New Index On HFJ_RESOURCE
        Builder.BuilderWithTableName resourceTable = version.onTable("HFJ_RESOURCE");
        resourceTable
                .addIndex("20230502.1", "IDX_RES_RESID_UPDATED")
                .unique(false)
                .online(true)
                .withColumns("RES_ID", "RES_UPDATED", "PARTITION_ID");
        Builder.BuilderWithTableName tagDefTable = version.onTable("HFJ_TAG_DEF");
        tagDefTable.dropIndex("20230505.1", "IDX_TAGDEF_TYPESYSCODEVERUS");
@ -731,6 +722,48 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
                .addColumn("20230110.2", "UPDATE_TIME")
                .nullable()
                .type(ColumnTypeEnum.DATE_TIMESTAMP);
        Map<DriverTypeEnum, String> updateBatch2JobInstance = new HashMap<>();
        updateBatch2JobInstance.put(
                DriverTypeEnum.H2_EMBEDDED,
                "update BT2_JOB_INSTANCE set UPDATE_TIME = coalesce(end_time, start_time, create_time, TIMESTAMP '2023-01-01 00:00:00') where UPDATE_TIME is null");
        updateBatch2JobInstance.put(
                DriverTypeEnum.MARIADB_10_1,
                "update BT2_JOB_INSTANCE set UPDATE_TIME = coalesce(end_time, start_time, create_time, TIMESTAMP '2023-01-01 00:00:00') where UPDATE_TIME is null");
        updateBatch2JobInstance.put(
                DriverTypeEnum.MYSQL_5_7,
                "update BT2_JOB_INSTANCE set UPDATE_TIME = coalesce(end_time, start_time, create_time, TIMESTAMP '2023-01-01 00:00:00') where UPDATE_TIME is null");
        updateBatch2JobInstance.put(
                DriverTypeEnum.ORACLE_12C,
                "update BT2_JOB_INSTANCE set UPDATE_TIME = coalesce(end_time, start_time, create_time, TIMESTAMP '2023-01-01 00:00:00') where UPDATE_TIME is null");
        updateBatch2JobInstance.put(
                DriverTypeEnum.POSTGRES_9_4,
                "update BT2_JOB_INSTANCE set UPDATE_TIME = coalesce(end_time, start_time, create_time, TIMESTAMP '2023-01-01 00:00:00') where UPDATE_TIME is null");
        updateBatch2JobInstance.put(
                DriverTypeEnum.MSSQL_2012,
                "update BT2_JOB_INSTANCE set UPDATE_TIME = coalesce(end_time, start_time, create_time, CONVERT(DATETIME,'2023-01-01 00:00:00')) where UPDATE_TIME is null");
        version.executeRawSql("20230397.1", updateBatch2JobInstance);
        Map<DriverTypeEnum, String> updateBatch2WorkChunk = new HashMap<>();
        updateBatch2WorkChunk.put(
                DriverTypeEnum.H2_EMBEDDED,
                "update bt2_work_chunk set UPDATE_TIME = coalesce(end_time, start_time, create_time, TIMESTAMP '2023-01-01 00:00:00') where UPDATE_TIME is null");
        updateBatch2WorkChunk.put(
                DriverTypeEnum.MARIADB_10_1,
                "update bt2_work_chunk set UPDATE_TIME = coalesce(end_time, start_time, create_time, TIMESTAMP '2023-01-01 00:00:00') where UPDATE_TIME is null");
        updateBatch2WorkChunk.put(
                DriverTypeEnum.MYSQL_5_7,
                "update bt2_work_chunk set UPDATE_TIME = coalesce(end_time, start_time, create_time, TIMESTAMP '2023-01-01 00:00:00') where UPDATE_TIME is null");
        updateBatch2WorkChunk.put(
                DriverTypeEnum.ORACLE_12C,
                "update bt2_work_chunk set UPDATE_TIME = coalesce(end_time, start_time, create_time, TIMESTAMP '2023-01-01 00:00:00') where UPDATE_TIME is null");
        updateBatch2WorkChunk.put(
                DriverTypeEnum.POSTGRES_9_4,
                "update bt2_work_chunk set UPDATE_TIME = coalesce(end_time, start_time, create_time, TIMESTAMP '2023-01-01 00:00:00') where UPDATE_TIME is null");
        updateBatch2WorkChunk.put(
                DriverTypeEnum.MSSQL_2012,
                "update bt2_work_chunk set UPDATE_TIME = coalesce(end_time, start_time, create_time, CONVERT(DATETIME,'2023-01-01 00:00:00')) where UPDATE_TIME is null");
        version.executeRawSql("20230397.2", updateBatch2WorkChunk);
    }

    private void init610() {

View File

@ -73,6 +73,12 @@ public class ${className}ResourceProvider extends
        @OptionalParam(name=ca.uhn.fhir.rest.api.Constants.PARAM_LIST)
        StringAndListParam theList,
#if ( $version == 'R5' )
        @Description(shortDefinition="The language of the resource")
        @OptionalParam(name=ca.uhn.fhir.rest.api.Constants.PARAM_LANGUAGE)
        TokenAndListParam theResourceLanguage,
#end
        @Description(shortDefinition="Search for resources which have the given source value (Resource.meta.source)")
        @OptionalParam(name=ca.uhn.fhir.rest.api.Constants.PARAM_SOURCE)
        UriAndListParam theSearchForSource,
@ -154,6 +160,9 @@ public class ${className}ResourceProvider extends
        paramMap.add(ca.uhn.fhir.rest.api.Constants.PARAM_PROFILE, theSearchForProfile);
        paramMap.add(ca.uhn.fhir.rest.api.Constants.PARAM_SOURCE, theSearchForSource);
        paramMap.add(ca.uhn.fhir.rest.api.Constants.PARAM_LIST, theList);
#if ( $version == 'R5' )
        paramMap.add(ca.uhn.fhir.rest.api.Constants.PARAM_LANGUAGE, theResourceLanguage);
#end
        paramMap.add("_has", theHas);
#foreach ( $param in $searchParams )
        paramMap.add("${param.name}", the${param.nameCapitalized});