Add migrator to uhnfhirtest (#5508)

* Add migrator to uhnfhirtest

* Fix spotless

* Test fix
This commit is contained in:
James Agnew 2023-11-29 09:16:41 -05:00 committed by GitHub
parent 6e683405a1
commit 469873aff7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 98 additions and 30 deletions

View File

@ -5,11 +5,14 @@ import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator; import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao; import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity; import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.jpa.util.RandomTextUtils;
import ca.uhn.fhir.system.HapiSystemProperties; import ca.uhn.fhir.system.HapiSystemProperties;
import com.google.common.base.Charsets; import com.google.common.base.Charsets;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils; import org.apache.commons.io.IOUtils;
import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.JdbcTemplate;
@ -35,10 +38,11 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.assertTrue;
@TestMethodOrder(MethodOrderer.MethodName.class)
public class HapiFlywayMigrateDatabaseCommandTest { public class HapiFlywayMigrateDatabaseCommandTest {
private static final Logger ourLog = LoggerFactory.getLogger(HapiFlywayMigrateDatabaseCommandTest.class); private static final Logger ourLog = LoggerFactory.getLogger(HapiFlywayMigrateDatabaseCommandTest.class);
public static final String DB_DIRECTORY = "target/h2_test"; private final String myDbDirectory = "target/h2_test/" + RandomTextUtils.newSecureRandomAlphaNumericString(5);
static { static {
HapiSystemProperties.enableTestMode(); HapiSystemProperties.enableTestMode();
@ -252,12 +256,12 @@ public class HapiFlywayMigrateDatabaseCommandTest {
@Nonnull @Nonnull
private File getLocation(String theDatabaseName) throws IOException { private File getLocation(String theDatabaseName) throws IOException {
File directory = new File(DB_DIRECTORY); File directory = new File(myDbDirectory);
if (directory.exists()) { if (directory.exists()) {
FileUtils.deleteDirectory(directory); FileUtils.deleteDirectory(directory);
} }
return new File(DB_DIRECTORY + "/" + theDatabaseName); return new File(myDbDirectory + "/" + theDatabaseName);
} }
private void seedDatabase340(DriverTypeEnum.ConnectionProperties theConnectionProperties) { private void seedDatabase340(DriverTypeEnum.ConnectionProperties theConnectionProperties) {

View File

@ -977,27 +977,34 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// Ugh. Only oracle supports using IDX_TAG_DEF_TP_CD_SYS to enforce this constraint. The others will // Ugh. Only oracle supports using IDX_TAG_DEF_TP_CD_SYS to enforce this constraint. The others will
// create another index. // create another index.
// For Sql Server, should change the index to be unique with include columns. Do this in 6.1 // For Sql Server, should change the index to be unique with include columns. Do this in 6.1
tagTable.dropIndex("20220429.8", "IDX_TAGDEF_TYPESYSCODE"); // tagTable.dropIndex("20220429.8", "IDX_TAGDEF_TYPESYSCODE");
Map<DriverTypeEnum, String> addTagDefConstraint = new HashMap<>(); // Map<DriverTypeEnum, String> addTagDefConstraint = new HashMap<>();
addTagDefConstraint.put( // addTagDefConstraint.put(
DriverTypeEnum.H2_EMBEDDED, // DriverTypeEnum.H2_EMBEDDED,
"ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)"); // "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE,
addTagDefConstraint.put( // TAG_SYSTEM)");
DriverTypeEnum.MARIADB_10_1, // addTagDefConstraint.put(
"ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)"); // DriverTypeEnum.MARIADB_10_1,
addTagDefConstraint.put( // "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE,
DriverTypeEnum.MSSQL_2012, // TAG_SYSTEM)");
"ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)"); // addTagDefConstraint.put(
addTagDefConstraint.put( // DriverTypeEnum.MSSQL_2012,
DriverTypeEnum.MYSQL_5_7, // "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE,
"ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)"); // TAG_SYSTEM)");
addTagDefConstraint.put( // addTagDefConstraint.put(
DriverTypeEnum.ORACLE_12C, // DriverTypeEnum.MYSQL_5_7,
"ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)"); // "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE,
addTagDefConstraint.put( // TAG_SYSTEM)");
DriverTypeEnum.POSTGRES_9_4, // addTagDefConstraint.put(
"ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)"); // DriverTypeEnum.ORACLE_12C,
version.executeRawSql("20220429.9", addTagDefConstraint); // "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE,
// TAG_SYSTEM)");
// addTagDefConstraint.put(
// DriverTypeEnum.POSTGRES_9_4,
// "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE,
// TAG_SYSTEM)");
// version.executeRawSql("20220429.9", addTagDefConstraint);
version.addNop("20220429.9");
} }
// Fix for https://github.com/hapifhir/hapi-fhir-jpaserver-starter/issues/328 // Fix for https://github.com/hapifhir/hapi-fhir-jpaserver-starter/issues/328

View File

@ -23,6 +23,7 @@ import ca.uhn.fhir.storage.interceptor.balp.IBalpAuditEventSink;
import ca.uhn.fhirtest.ScheduledSubscriptionDeleter; import ca.uhn.fhirtest.ScheduledSubscriptionDeleter;
import ca.uhn.fhirtest.interceptor.AnalyticsInterceptor; import ca.uhn.fhirtest.interceptor.AnalyticsInterceptor;
import ca.uhn.fhirtest.joke.HolyFooCowInterceptor; import ca.uhn.fhirtest.joke.HolyFooCowInterceptor;
import ca.uhn.fhirtest.migrate.FhirTestAutoMigrator;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Import;
@ -131,6 +132,11 @@ public class CommonConfig {
return new FhirTestBalpAuditContextServices(); return new FhirTestBalpAuditContextServices();
} }
@Bean
// Spring bean that runs the automatic schema migration on startup
// (FhirTestAutoMigrator executes its work from a @PostConstruct hook,
// so merely instantiating the bean triggers the migration).
public FhirTestAutoMigrator migrator() {
return new FhirTestAutoMigrator();
}
public static boolean isLocalTestMode() { public static boolean isLocalTestMode() {
return "true".equalsIgnoreCase(System.getProperty("testmode.local")); return "true".equalsIgnoreCase(System.getProperty("testmode.local"));
} }

View File

@ -120,7 +120,7 @@ public class TestAuditConfig {
} }
extraProperties.put("hibernate.format_sql", "false"); extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false"); extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update"); extraProperties.put("hibernate.hbm2ddl.auto", "none");
extraProperties.put("hibernate.jdbc.batch_size", "20"); extraProperties.put("hibernate.jdbc.batch_size", "20");
extraProperties.put("hibernate.cache.use_query_cache", "false"); extraProperties.put("hibernate.cache.use_query_cache", "false");
extraProperties.put("hibernate.cache.use_second_level_cache", "false"); extraProperties.put("hibernate.cache.use_second_level_cache", "false");

View File

@ -138,7 +138,7 @@ public class TestDstu2Config {
} }
extraProperties.put("hibernate.format_sql", "false"); extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false"); extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update"); extraProperties.put("hibernate.hbm2ddl.auto", "none");
extraProperties.put("hibernate.jdbc.batch_size", "20"); extraProperties.put("hibernate.jdbc.batch_size", "20");
extraProperties.put("hibernate.cache.use_query_cache", "false"); extraProperties.put("hibernate.cache.use_query_cache", "false");
extraProperties.put("hibernate.cache.use_second_level_cache", "false"); extraProperties.put("hibernate.cache.use_second_level_cache", "false");

View File

@ -138,7 +138,7 @@ public class TestDstu3Config {
} }
extraProperties.put("hibernate.format_sql", "false"); extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false"); extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update"); extraProperties.put("hibernate.hbm2ddl.auto", "none");
extraProperties.put("hibernate.jdbc.batch_size", "20"); extraProperties.put("hibernate.jdbc.batch_size", "20");
extraProperties.put("hibernate.cache.use_query_cache", "false"); extraProperties.put("hibernate.cache.use_query_cache", "false");
extraProperties.put("hibernate.cache.use_second_level_cache", "false"); extraProperties.put("hibernate.cache.use_second_level_cache", "false");

View File

@ -136,7 +136,7 @@ public class TestR4BConfig {
} }
extraProperties.put("hibernate.format_sql", "false"); extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false"); extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update"); extraProperties.put("hibernate.hbm2ddl.auto", "none");
extraProperties.put("hibernate.jdbc.batch_size", "20"); extraProperties.put("hibernate.jdbc.batch_size", "20");
extraProperties.put("hibernate.cache.use_query_cache", "false"); extraProperties.put("hibernate.cache.use_query_cache", "false");
extraProperties.put("hibernate.cache.use_second_level_cache", "false"); extraProperties.put("hibernate.cache.use_second_level_cache", "false");

View File

@ -139,7 +139,7 @@ public class TestR4Config {
} }
extraProperties.put("hibernate.format_sql", "false"); extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false"); extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update"); extraProperties.put("hibernate.hbm2ddl.auto", "none");
extraProperties.put("hibernate.jdbc.batch_size", "20"); extraProperties.put("hibernate.jdbc.batch_size", "20");
extraProperties.put("hibernate.cache.use_query_cache", "false"); extraProperties.put("hibernate.cache.use_query_cache", "false");
extraProperties.put("hibernate.cache.use_second_level_cache", "false"); extraProperties.put("hibernate.cache.use_second_level_cache", "false");

View File

@ -148,7 +148,7 @@ public class TestR5Config {
} }
extraProperties.put("hibernate.format_sql", "false"); extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false"); extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update"); extraProperties.put("hibernate.hbm2ddl.auto", "none");
extraProperties.put("hibernate.jdbc.batch_size", "20"); extraProperties.put("hibernate.jdbc.batch_size", "20");
extraProperties.put("hibernate.cache.use_query_cache", "false"); extraProperties.put("hibernate.cache.use_query_cache", "false");
extraProperties.put("hibernate.cache.use_second_level_cache", "false"); extraProperties.put("hibernate.cache.use_second_level_cache", "false");

View File

@ -0,0 +1,51 @@
package ca.uhn.fhirtest.migrate;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.HapiMigrationStorageSvc;
import ca.uhn.fhir.jpa.migrate.MigrationTaskList;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;
import ca.uhn.fhirtest.config.CommonConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.Properties;
import java.util.Set;
import javax.annotation.PostConstruct;
import javax.sql.DataSource;
public class FhirTestAutoMigrator {
private static final Logger ourLog = LoggerFactory.getLogger(FhirTestAutoMigrator.class);
public static final String MIGRATION_TABLENAME = "MIGRATIONS";
@Autowired
private DataSource myDataSource;
@PostConstruct
public void run() {
DriverTypeEnum driver;
if (CommonConfig.isLocalTestMode()) {
driver = DriverTypeEnum.H2_EMBEDDED;
} else {
driver = DriverTypeEnum.POSTGRES_9_4;
}
HapiMigrationDao hapiMigrationDao = new HapiMigrationDao(myDataSource, driver, MIGRATION_TABLENAME);
HapiMigrationStorageSvc hapiMigrationStorageSvc = new HapiMigrationStorageSvc(hapiMigrationDao);
MigrationTaskList tasks = new HapiFhirJpaMigrationTasks(Set.of()).getAllTasks(VersionEnum.values());
SchemaMigrator schemaMigrator = new SchemaMigrator(
"HAPI FHIR", MIGRATION_TABLENAME, myDataSource, new Properties(), tasks, hapiMigrationStorageSvc);
schemaMigrator.setDriverType(driver);
ourLog.info("About to run migration...");
schemaMigrator.createMigrationTableIfRequired();
schemaMigrator.migrate();
ourLog.info("Migration complete");
}
}