CLI tool command migrate-database executing in dry-run mode inserts entries into table FLY_HFJ_MIGRATION (#5487)
* initial test
* Solution with changelog.
* making spotless hapi
* addressing comments from code reviews
* making the test better.
* addressing code review comment and adding test.

Co-authored-by: peartree <etienne.poirier@smilecdr.com>
parent f39dbb5751
commit 4cc4682245
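In essence, the change guards both writes to the migration bookkeeping table behind the dry-run flag. The standalone sketch below illustrates that pattern only; the class and interface names in it (MigratorSketch, MigrationStorage) are illustrative and not HAPI's API, and the actual change to HapiMigrator appears in the diff further down.

// Illustrative only: a dry run must neither create FLY_HFJ_MIGRATION nor record tasks in it.
class MigratorSketch {

    interface MigrationStorage {
        void createMigrationTableIfRequired();
        void saveTaskResult(String theVersion, int theExecutionTimeMillis, boolean theSuccess);
    }

    private final boolean myDryRun;
    private final MigrationStorage myStorage;

    MigratorSketch(boolean theDryRun, MigrationStorage theStorage) {
        myDryRun = theDryRun;
        myStorage = theStorage;
    }

    void createMigrationTableIfRequired() {
        // A dry run is not supposed to change the database, so the bookkeeping table is not created either
        if (!myDryRun) {
            myStorage.createMigrationTableIfRequired();
        }
    }

    void postExecute(String theVersion, int theExecutionTimeMillis, boolean theSuccess) {
        // Only tasks that really executed are recorded
        if (!myDryRun) {
            myStorage.saveTaskResult(theVersion, theExecutionTimeMillis, theSuccess);
        }
    }
}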
@@ -2,6 +2,9 @@ package ca.uhn.fhir.cli;
 import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
 import ca.uhn.fhir.jpa.migrate.JdbcUtils;
+import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
+import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
+import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
 import ca.uhn.fhir.system.HapiSystemProperties;
 import com.google.common.base.Charsets;
 import org.apache.commons.io.FileUtils;
@@ -123,11 +126,13 @@ public class HapiFlywayMigrateDatabaseCommandTest {
 
     String url = "jdbc:h2:" + location.getAbsolutePath();
     DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
+    HapiMigrationDao hapiMigrationDao = new HapiMigrationDao(connectionProperties.getDataSource(), connectionProperties.getDriverType(), SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME);
 
     String initSql = "/persistence_create_h2_340.sql";
     executeSqlStatements(connectionProperties, initSql);
 
     seedDatabase340(connectionProperties);
+    seedDatabaseMigration340(hapiMigrationDao);
 
     ourLog.info("**********************************************");
     ourLog.info("Done Setup, Starting Migration...");
@@ -160,6 +165,7 @@ public class HapiFlywayMigrateDatabaseCommandTest {
     // Verify that foreign key FK_SEARCHRES_RES on HFJ_SEARCH_RESULT exists
     foreignKeys = JdbcUtils.getForeignKeys(connectionProperties, "HFJ_RESOURCE", "HFJ_SEARCH_RESULT");
     assertTrue(foreignKeys.contains("FK_SEARCHRES_RES"));
+    int expectedMigrationEntities = hapiMigrationDao.findAll().size();
 
     App.main(args);
 
@@ -181,6 +187,8 @@ public class HapiFlywayMigrateDatabaseCommandTest {
     // Verify that foreign key FK_SEARCHRES_RES on HFJ_SEARCH_RESULT still exists
     foreignKeys = JdbcUtils.getForeignKeys(connectionProperties, "HFJ_RESOURCE", "HFJ_SEARCH_RESULT");
     assertTrue(foreignKeys.contains("FK_SEARCHRES_RES"));
+
+    assertTrue(expectedMigrationEntities == hapiMigrationDao.findAll().size());
   }
 
   @Test
@@ -210,6 +218,38 @@ public class HapiFlywayMigrateDatabaseCommandTest {
     assertTrue(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BLK_EXPORT_JOB")); // Late table
   }
 
+  @Test
+  public void testMigrateFrom340_dryRun_whenNoMigrationTableExists() throws IOException, SQLException {
+
+    File location = getLocation("migrator_h2_test_340_dryrun");
+
+    String url = "jdbc:h2:" + location.getAbsolutePath();
+    DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
+    HapiMigrationDao hapiMigrationDao = new HapiMigrationDao(connectionProperties.getDataSource(), connectionProperties.getDriverType(), SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME);
+
+    String initSql = "/persistence_create_h2_340.sql";
+    executeSqlStatements(connectionProperties, initSql);
+
+    seedDatabase340(connectionProperties);
+
+    ourLog.info("**********************************************");
+    ourLog.info("Done Setup, Starting Migration...");
+    ourLog.info("**********************************************");
+
+    String[] args = new String[]{
+      BaseFlywayMigrateDatabaseCommand.MIGRATE_DATABASE,
+      "-d", "H2_EMBEDDED",
+      "-u", url,
+      "-n", "",
+      "-p", "",
+      "-r"
+    };
+
+    App.main(args);
+
+    assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("FLY_HFJ_MIGRATION"));
+  }
+
   @Nonnull
   private File getLocation(String theDatabaseName) throws IOException {
     File directory = new File(DB_DIRECTORY);
@@ -360,4 +400,16 @@ public class HapiFlywayMigrateDatabaseCommandTest {
 
   }
 
+  private void seedDatabaseMigration340(HapiMigrationDao theHapiMigrationDao) {
+    theHapiMigrationDao.createMigrationTableIfRequired();
+    HapiMigrationEntity hapiMigrationEntity = new HapiMigrationEntity();
+    hapiMigrationEntity.setPid(1);
+    hapiMigrationEntity.setVersion("3.4.0.20180401.1");
+    hapiMigrationEntity.setDescription("some sql statement");
+    hapiMigrationEntity.setExecutionTime(25);
+    hapiMigrationEntity.setSuccess(true);
+
+    theHapiMigrationDao.save(hapiMigrationEntity);
+  }
+
 }
@@ -0,0 +1,6 @@
+---
+type: fix
+issue: 5486
+jira: SMILE-7457
+title: "Previously, testing database migration with the cli migrate-database command in dry-run mode would insert entries
+  into the migration task table. The issue has been fixed."
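For reference, the behaviour described in this changelog entry is exactly what the new test above exercises; the condensed usage sketch below assumes, based solely on the test arguments, that -r is the dry-run switch of migrate-database, and it reuses helpers from HapiFlywayMigrateDatabaseCommandTest.

@Test
public void dryRunLeavesMigrationTableAbsent() throws IOException, SQLException {
    // Condensed from the new test above: getLocation and executeSqlStatements are helpers of
    // the test class; the flag comments are assumptions read off the test arguments.
    File location = getLocation("migrator_h2_test_340_dryrun");
    String url = "jdbc:h2:" + location.getAbsolutePath();
    DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
    executeSqlStatements(connectionProperties, "/persistence_create_h2_340.sql"); // schema as of 3.4.0
    seedDatabase340(connectionProperties);

    String[] args = new String[]{
        BaseFlywayMigrateDatabaseCommand.MIGRATE_DATABASE, // the "migrate-database" command
        "-d", "H2_EMBEDDED",  // driver type
        "-u", url,            // JDBC URL
        "-n", "",             // username
        "-p", "",             // password
        "-r"                  // dry-run switch
    };
    App.main(args);

    // After a dry run the bookkeeping table must not exist
    assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("FLY_HFJ_MIGRATION"));
}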
@@ -188,7 +188,9 @@ public class HapiMigrator {
   }
 
   private void postExecute(BaseTask theNext, StopWatch theStopWatch, boolean theSuccess) {
-    myHapiMigrationStorageSvc.saveTask(theNext, Math.toIntExact(theStopWatch.getMillis()), theSuccess);
+    if (!theNext.isDryRun()) {
+      myHapiMigrationStorageSvc.saveTask(theNext, Math.toIntExact(theStopWatch.getMillis()), theSuccess);
+    }
   }
 
   public void addTasks(Iterable<BaseTask> theMigrationTasks) {
@@ -219,6 +221,8 @@ public class HapiMigrator {
   }
 
   public void createMigrationTableIfRequired() {
-    myHapiMigrationStorageSvc.createMigrationTableIfRequired();
+    if (!myDryRun) {
+      myHapiMigrationStorageSvc.createMigrationTableIfRequired();
+    }
   }
 }