diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java
index 15a7402496d..77a48240a83 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java
@@ -78,7 +78,9 @@ public enum VersionEnum {
 	V5_5_1,
 	V5_5_2,
 	V5_5_3,
+	V5_5_4,
 	V5_6_0,
+	V5_6_1,
 	V5_7_0
 	;
 
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_6_1/version.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_6_1/version.yaml
new file mode 100644
index 00000000000..10e3c0ae5c7
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_6_1/version.yaml
@@ -0,0 +1,3 @@
+---
+release-date: "2021-12-03"
+codename: "Raccoon"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3153-updating-unknowncodesystem-to-allow-warnings-to-be-registered-as-warnings.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3153-updating-unknowncodesystem-to-allow-warnings-to-be-registered-as-warnings.yaml
index b1e0a1eaafd..bbf22d3ac07 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3153-updating-unknowncodesystem-to-allow-warnings-to-be-registered-as-warnings.yaml
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3153-updating-unknowncodesystem-to-allow-warnings-to-be-registered-as-warnings.yaml
@@ -1,6 +1,7 @@
 ---
 type: fix
 issue: 3153
+backport: 5.6.1
 jira: SMILE-3289
 title: "Updated UnknownCodeSystemWarningValidationSupport to allow the throwing of warnings if configured to do so."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3158-creation-of-versioned-placeholders.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3158-creation-of-versioned-placeholders.yaml
index 20d2fd864af..d6fe1bcca32 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3158-creation-of-versioned-placeholders.yaml
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3158-creation-of-versioned-placeholders.yaml
@@ -1,5 +1,6 @@
 ---
 type: fix
 issue: 3158
+backport: 5.6.1
 title: "Resource links were previously not being consistently created in cases where references were versioned and pointing to recently auto-created placeholder resources."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3198-package-loading-woes.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3198-package-loading-woes.yaml
index 800e393f9fc..5bba478f8d6 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3198-package-loading-woes.yaml
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3198-package-loading-woes.yaml
@@ -2,4 +2,5 @@
 type: change
 issue: 3198
 jira: SMILE-3452
+backport: 5.6.1
 title: "Fixed a regression where packages would fail to load if HAPI-FHIR was operating in DATABASE binary storage mode."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3211-reindex-performance.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3211-reindex-performance.yaml
index b429d77dcad..150d7bc4e12 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3211-reindex-performance.yaml
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3211-reindex-performance.yaml
@@ -1,4 +1,5 @@
 ---
 type: fix
+backport: 5.6.1
 jira: SMILE-3472
 title: "Fixed a serious performance issue with the `$reindex` operation."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3215-nextURL-does-not-work-for-a-certain-offset-with-everything.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3215-nextURL-does-not-work-for-a-certain-offset-with-everything.yaml
index bd97cd96ce6..328509868b5 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3215-nextURL-does-not-work-for-a-certain-offset-with-everything.yaml
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3215-nextURL-does-not-work-for-a-certain-offset-with-everything.yaml
@@ -1,4 +1,5 @@
 ---
 type: fix
 issue: 3215
+backport: 5.6.1
 title: "$everything operation returns a 500 error when querying for a page when _getpagesoffset is greater than or equal to 300. This has been corrected."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3216-mdm-possible-match-to-match.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3216-mdm-possible-match-to-match.yaml
index 4483c10ab87..3f9cf9690ea 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3216-mdm-possible-match-to-match.yaml
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_7_0/3216-mdm-possible-match-to-match.yaml
@@ -1,3 +1,4 @@
 ---
 type: fix
+backport: 5.6.1
 title: "MDM was throwing a NullPointerException when upgrading a match from POSSIBLE_MATCH to MATCH. This has been corrected."
diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java
index 06a5cdddce4..749233b7d55 100644
--- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java
+++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java
@@ -46,7 +46,10 @@ import org.apache.commons.lang3.Validate;
 import org.intellij.lang.annotations.Language;
 
 import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
 
@@ -73,11 +76,21 @@ public class Builder {
    }
 
    public BuilderCompleteTask executeRawSql(String theVersion, @Language("SQL") String theSql) {
-      ExecuteRawSqlTask task = new ExecuteRawSqlTask(myRelease, theVersion).addSql(theSql);
-      mySink.addTask(task);
+      ExecuteRawSqlTask task = executeRawSqlOptional(false, theVersion, theSql);
       return new BuilderCompleteTask(task);
    }
 
+   public void executeRawSqlStub(String theVersion, @Language("SQL") String theSql) {
+      executeRawSqlOptional(true, theVersion, theSql);
+   }
+
+   private ExecuteRawSqlTask executeRawSqlOptional(boolean theDoNothing, String theVersion, @Language("SQL") String theSql) {
+      ExecuteRawSqlTask task = new ExecuteRawSqlTask(myRelease, theVersion).addSql(theSql);
+      task.setDoNothing(theDoNothing);
+      mySink.addTask(task);
+      return task;
+   }
+
    public Builder initializeSchema(String theVersion, ISchemaInitializationProvider theSchemaInitializationProvider) {
       mySink.addTask(new InitializeSchemaTask(myRelease, theVersion, theSchemaInitializationProvider));
       return this;
@@ -96,6 +109,41 @@ public class Builder {
       return this;
    }
 
+   /**
+    * Builder method to define a raw SQL execution migration that needs to take place against multiple database types,
+    * and the SQL they need to use is not equal. Provide a map of driver types to SQL statements.
+    *
+    * @param theVersion The version of the migration.
+    * @param theDriverToSql Map of driver types to SQL statements.
+    * @return
+    */
+   public Builder executeRawSql(String theVersion, Map<DriverTypeEnum, String> theDriverToSql) {
+      Map<DriverTypeEnum, List<String>> singleSqlStatementMap = new HashMap<>();
+      theDriverToSql.entrySet().stream()
+         .forEach(entry -> {
+            singleSqlStatementMap.put(entry.getKey(), Collections.singletonList(entry.getValue()));
+         });
+      return executeRawSqls(theVersion, singleSqlStatementMap);
+   }
+
+   /**
+    * Builder method to define a raw SQL execution migration that needs to take place against multiple database types,
+    * and the SQL they need to use is not equal, and there are multiple SQL commands for a given database.
+    * Provide a map of driver types to lists of SQL statements.
+    *
+    * @param theVersion The version of the migration.
+    * @param theDriverToSqls Map of driver types to lists of SQL statements.
+    * @return
+    */
+   public Builder executeRawSqls(String theVersion, Map<DriverTypeEnum, List<String>> theDriverToSqls) {
+      ExecuteRawSqlTask executeRawSqlTask = new ExecuteRawSqlTask(myRelease, theVersion);
+      theDriverToSqls.entrySet().stream()
+         .forEach(entry -> {
+            entry.getValue().forEach(sql -> executeRawSqlTask.addSql(entry.getKey(), sql));
+         });
+      mySink.addTask(executeRawSqlTask);
+      return this;
+   }
 
    // Flyway doesn't support these kinds of migrations
    @Deprecated
diff --git a/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/ExecuteRawSqlTaskTest.java b/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/ExecuteRawSqlTaskTest.java
index 7e0dc9d2804..e2c66c22b78 100644
--- a/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/ExecuteRawSqlTaskTest.java
+++ b/hapi-fhir-sql-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/ExecuteRawSqlTaskTest.java
@@ -6,6 +6,7 @@ import ca.uhn.fhir.util.VersionEnum;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
 
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.function.Supplier;
@@ -81,5 +82,57 @@ public class ExecuteRawSqlTaskTest extends BaseTest {
       }
    }
 
+   @ParameterizedTest(name = "{index}: {0}")
+   @MethodSource("data")
+   public void testDriverTypeBasedRawSqlExecution(Supplier<TestDatabaseDetails> theTestDatabaseDetails) {
+      //Given
+      before(theTestDatabaseDetails);
+      executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
+      BaseMigrationTasks<VersionEnum> tasks = new BaseMigrationTasks<>();
+      Map<DriverTypeEnum, String> driverToSql = new HashMap<>();
+
+      //When
+      driverToSql.put(DriverTypeEnum.H2_EMBEDDED, "INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (123, 'abc')");
+      driverToSql.put(DriverTypeEnum.DERBY_EMBEDDED, "INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (456, 'def')");
+      tasks
+         .forVersion(VersionEnum.V4_0_0)
+         .executeRawSql("2001.01", driverToSql);
+
+      getMigrator().addTasks(tasks.getTasks(VersionEnum.V0_1, VersionEnum.V4_0_0));
+      getMigrator().migrate();
+
+      List<Map<String, Object>> output = executeQuery("SELECT PID,TEXTCOL FROM SOMETABLE");
+      //Then
+      if (theTestDatabaseDetails.get().getDriverType() == DriverTypeEnum.H2_EMBEDDED) {
+         assertEquals(1, output.size());
+         assertEquals(123L, output.get(0).get("PID"));
+         assertEquals("abc", output.get(0).get("TEXTCOL"));
+      } else if (theTestDatabaseDetails.get().getDriverType() == DriverTypeEnum.DERBY_EMBEDDED) {
+         assertEquals(1, output.size());
+         assertEquals(456L, output.get(0).get("PID"));
+         assertEquals("def", output.get(0).get("TEXTCOL"));
+      } else {
+         assertEquals(0, output.size());
+      }
+   }
+
+   @ParameterizedTest(name = "{index}: {0}")
+   @MethodSource("data")
+   public void testExecuteRawSqlStub(Supplier<TestDatabaseDetails> theTestDatabaseDetails) {
+      //Given
+      before(theTestDatabaseDetails);
+      executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
+
+      BaseMigrationTasks<VersionEnum> tasks = new BaseMigrationTasks<>();
+      tasks.forVersion(VersionEnum.V4_0_0)
+         .executeRawSqlStub("2001.01", "INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (123, 'abc')");
+
+      getMigrator().addTasks(tasks.getTasks(VersionEnum.V0_1, VersionEnum.V4_0_0));
+      getMigrator().migrate();
+
+      List<Map<String, Object>> output = executeQuery("SELECT PID,TEXTCOL FROM SOMETABLE");
+
+      assertEquals(0, output.size());
+   }
 }
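Usage illustration (not part of this diff): a minimal sketch of how the new builder methods above might be called from a migration-tasks class. The class name ExampleMigrationTasks, the version labels, the driver choices, and the SQL statements are assumptions for illustration only; the methods executeRawSql(Map), executeRawSqls(Map), and executeRawSqlStub(...) come from the Builder.java changes above, and import locations are inferred from the file paths shown in this diff.

// Illustrative sketch only -- not part of this change. Names, version labels,
// and SQL are hypothetical; import locations are inferred from the diff paths.
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
import ca.uhn.fhir.util.VersionEnum;

public class ExampleMigrationTasks extends BaseMigrationTasks<VersionEnum> {

   public ExampleMigrationTasks() {
      Builder version = forVersion(VersionEnum.V5_7_0);

      // One logical migration with different SQL per database driver; the task
      // only executes the statement registered for the driver in use.
      Map<DriverTypeEnum, String> driverToSql = new HashMap<>();
      driverToSql.put(DriverTypeEnum.H2_EMBEDDED,
         "ALTER TABLE SOMETABLE ALTER COLUMN TEXTCOL VARCHAR(500)");
      driverToSql.put(DriverTypeEnum.DERBY_EMBEDDED,
         "ALTER TABLE SOMETABLE ALTER COLUMN TEXTCOL SET DATA TYPE VARCHAR(500)");
      version.executeRawSql("2001.01", driverToSql);

      // Several statements for one driver can be grouped with executeRawSqls(...)
      Map<DriverTypeEnum, List<String>> driverToSqls = new HashMap<>();
      driverToSqls.put(DriverTypeEnum.H2_EMBEDDED, Arrays.asList(
         "UPDATE SOMETABLE SET TEXTCOL = 'x' WHERE TEXTCOL IS NULL",
         "DELETE FROM SOMETABLE WHERE PID < 0"));
      version.executeRawSqls("2001.02", driverToSqls);

      // Stub entry: records the version slot but executes nothing, as exercised
      // by the testExecuteRawSqlStub(...) test above.
      version.executeRawSqlStub("2001.03", "INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (1, 'noop')");
   }
}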