Improve migrator (#6120)

* wip

* spotless

* Allow schema migrator to propagate down additional CLI-style config

* wip

* implement dummy constructor so i dont break the build

* wip
This commit is contained in:
Tadgh 2024-07-20 14:10:42 -07:00 committed by GitHub
parent 497eb6002f
commit fcb69e600f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 76 additions and 2 deletions

View File

@ -63,10 +63,17 @@ public class HapiMigrator {
return myDataSource; return myDataSource;
} }
/**
* If set to true, instead of executing migrations, will instead simply print the SQL that would be executed against the connection.
* @return A boolean indicating whether or not the migration should be a dry run
*/
public boolean isDryRun() { public boolean isDryRun() {
return myDryRun; return myDryRun;
} }
/**
* If set to true, instead of executing migrations, will instead simply print the SQL that would be executed against the connection.
*/
public void setDryRun(boolean theDryRun) { public void setDryRun(boolean theDryRun) {
myDryRun = theDryRun; myDryRun = theDryRun;
} }

View File

@ -32,21 +32,85 @@ import java.util.List;
import java.util.Properties; import java.util.Properties;
import javax.sql.DataSource; import javax.sql.DataSource;
/**
* The SchemaMigrator class is responsible for managing and executing database schema migrations during standard bootup.
* It provides methods to validate the schema version and migrate the schema if necessary.
*
* This supports all the configuration needed to run via the CLI or via a bootup migrator. Specifically:
* 1. Dry Run
* 2. Enable Heavyweight Tasks
* 3. Task Skipping
*
*/
public class SchemaMigrator { public class SchemaMigrator {
public static final String HAPI_FHIR_MIGRATION_TABLENAME = "FLY_HFJ_MIGRATION"; public static final String HAPI_FHIR_MIGRATION_TABLENAME = "FLY_HFJ_MIGRATION";
private static final Logger ourLog = LoggerFactory.getLogger(SchemaMigrator.class); private static final Logger ourLog = LoggerFactory.getLogger(SchemaMigrator.class);
/**
* The Schema Name
*/
private final String mySchemaName; private final String mySchemaName;
/**
* The datasource to connect to the Database with.
*/
private final DataSource myDataSource; private final DataSource myDataSource;
/**
* Whether to skip validation of the schema
*/
private final boolean mySkipValidation; private final boolean mySkipValidation;
/**
* See {@link HapiMigrator#isRunHeavyweightSkippableTasks()}. Enables or disables Optional Heavyweight migrations
*/
private final boolean myRunHeavyweightMigrationTasks;
/**
* The name of the table in which migrations are tracked.
*/
private final String myMigrationTableName; private final String myMigrationTableName;
/**
* The actual task list, which will be filtered during construction, and having all Tasks enumerated in `theMigrationTasksToSkip` removed before execution.
*/
private final MigrationTaskList myMigrationTasks; private final MigrationTaskList myMigrationTasks;
/**
* See {@link HapiMigrator#isDryRun()}
*/
private final boolean myDryRun;
private DriverTypeEnum myDriverType; private DriverTypeEnum myDriverType;
/**
* Inventory of callbacks to invoke for pre and post execution
*/
private List<IHapiMigrationCallback> myCallbacks = Collections.emptyList(); private List<IHapiMigrationCallback> myCallbacks = Collections.emptyList();
private final HapiMigrationStorageSvc myHapiMigrationStorageSvc; private final HapiMigrationStorageSvc myHapiMigrationStorageSvc;
/** /**
* Constructor * Constructor
*/ */
/**
 * Full constructor supporting all CLI-style configuration options.
 *
 * @param theSchemaName the name of the database schema being migrated
 * @param theMigrationTableName the name of the table in which executed migrations are tracked
 * @param theDataSource the datasource used to connect to the database
 * @param theEnableHeavyweightTasks whether optional heavyweight (skippable) migration tasks should be run
 * @param theMigrationTasksToSkip the migration task versions to skip; these are marked do-nothing rather than removed
 * @param theDryRun if {@code true}, print the SQL that would be executed instead of executing it
 * @param theJpaProperties JPA/Hibernate properties; schema validation is skipped when
 *                         {@code AvailableSettings.HBM2DDL_AUTO} is set to {@code "update"}
 * @param theMigrationTasks the full list of migration tasks to apply
 * @param theHapiMigrationStorageSvc the service used to persist migration state
 */
public SchemaMigrator(
		String theSchemaName,
		String theMigrationTableName,
		DataSource theDataSource,
		boolean theEnableHeavyweightTasks,
		String theMigrationTasksToSkip,
		boolean theDryRun,
		Properties theJpaProperties,
		MigrationTaskList theMigrationTasks,
		HapiMigrationStorageSvc theHapiMigrationStorageSvc) {
	mySchemaName = theSchemaName;
	myDataSource = theDataSource;
	myMigrationTableName = theMigrationTableName;
	myMigrationTasks = theMigrationTasks;
	myRunHeavyweightMigrationTasks = theEnableHeavyweightTasks;
	// When Hibernate is configured to manage the schema itself (hbm2ddl.auto=update),
	// our own schema validation would produce false failures, so it is skipped.
	mySkipValidation = theJpaProperties.containsKey(AvailableSettings.HBM2DDL_AUTO)
			&& "update".equals(theJpaProperties.getProperty(AvailableSettings.HBM2DDL_AUTO));
	myHapiMigrationStorageSvc = theHapiMigrationStorageSvc;
	// Mark the skipped versions as do-nothing before any execution happens.
	myMigrationTasks.setDoNothingOnSkippedTasks(theMigrationTasksToSkip);
	myDryRun = theDryRun;
}
/**
* Temporary Dummy Constructor to remove once CDR side merges.
*/
public SchemaMigrator( public SchemaMigrator(
String theSchemaName, String theSchemaName,
String theMigrationTableName, String theMigrationTableName,
@ -58,10 +122,12 @@ public class SchemaMigrator {
myDataSource = theDataSource; myDataSource = theDataSource;
myMigrationTableName = theMigrationTableName; myMigrationTableName = theMigrationTableName;
myMigrationTasks = theMigrationTasks; myMigrationTasks = theMigrationTasks;
myRunHeavyweightMigrationTasks = false;
mySkipValidation = jpaProperties.containsKey(AvailableSettings.HBM2DDL_AUTO) mySkipValidation = jpaProperties.containsKey(AvailableSettings.HBM2DDL_AUTO)
&& "update".equals(jpaProperties.getProperty(AvailableSettings.HBM2DDL_AUTO)); && "update".equals(jpaProperties.getProperty(AvailableSettings.HBM2DDL_AUTO));
myHapiMigrationStorageSvc = theHapiMigrationStorageSvc; myHapiMigrationStorageSvc = theHapiMigrationStorageSvc;
// Skip the skipped versions here.
myDryRun = false;
} }
public void validate() { public void validate() {
@ -111,6 +177,8 @@ public class SchemaMigrator {
migrator = new HapiMigrator(myMigrationTableName, myDataSource, myDriverType); migrator = new HapiMigrator(myMigrationTableName, myDataSource, myDriverType);
migrator.addTasks(myMigrationTasks); migrator.addTasks(myMigrationTasks);
migrator.setCallbacks(myCallbacks); migrator.setCallbacks(myCallbacks);
migrator.setDryRun(myDryRun);
migrator.setRunHeavyweightSkippableTasks(myRunHeavyweightMigrationTasks);
return migrator; return migrator;
} }

View File

@ -82,7 +82,6 @@ public class BaseMigrationTasks<T extends Enum> {
retval.addAll(nextValues); retval.addAll(nextValues);
} }
} }
return retval; return retval;
} }