Ks 20220916 remove flyway (#4040)

* removed flyway, started coding persistence

* started wiring up new dao service

* replace jpa with sqlbuilder

* down to 16 failed tests

* remove outOfOrder test.  We no longer support validating migration order.

* Removed noFlyway mode.  It is no longer a thing.

* merge migrator

* down to 1 failing test

* cleanup

* removed unused noflyway cli arguments

* cleanup

* cleanup

* cleanup

* cleanup

* create MigrationTaskList to replace the List<BaseTask> that gets passed around a lot.  This allows us to attach behaviour to the task list as a first-class concept.

* fix test

* fix test

* improve migration table detection to use the catalogue

* Msg.code()

* cleanup

* save version in same format as flyway

* cleanup

* add migration result to migration result class

* failed migrations stay in the table but do not block subsequent migration runs.

* change log

* docs

* ugh Flyway requires lower-case column names

* integration test fixes

* log

* fix oracle issue

* lowercase tablenames for postgres.  fussy fussy!

* Revert "lowercase tablenames for postgres.  fussy fussy!"

This reverts commit 92c9cb263b.

* revert to capital table names.  surround table name by quotes for Postgres.

* added log message before executing find all query

* changed log message from info to debug

* javadoc

* pom cleanup

* review feedback

* Update hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationTaskList.java

Co-authored-by: michaelabuckley <michaelabuckley@gmail.com>

* review feedback

* review feedback.  Use prepared statement.

* Change installedOn insert from timestamp to date

* Change installedOn column type from timestamp to date

* bump hapi version

* fix poms

* fix poms and cdr compile

* add exclusions to clean up build

Co-authored-by: Ken Stevens <ken@smilecdr.com>
Co-authored-by: michaelabuckley <michaelabuckley@gmail.com>
Ken Stevens 2022-10-03 22:40:09 -04:00 committed by GitHub
parent 66baa8263d
commit 40163f73d2
120 changed files with 1407 additions and 1231 deletions
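
To orient the diff that follows, here is a minimal sketch (not part of the PR itself) of how the new HapiMigrator is driven after this change, mirroring the updated CLI command below; the H2 URL and empty credentials are placeholder assumptions:

```java
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.HapiMigrator;
import ca.uhn.fhir.jpa.migrate.MigrationResult;
import ca.uhn.fhir.jpa.migrate.MigrationTaskList;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;

import java.util.Collections;

public class MigrateExample {
    public static void main(String[] args) {
        // Placeholder H2 connection details, for illustration only.
        DriverTypeEnum.ConnectionProperties connectionProperties =
            DriverTypeEnum.H2_EMBEDDED.newConnectionProperties("jdbc:h2:./example", "", "");

        // Build the full task list for all released versions, as the CLI command does.
        MigrationTaskList taskList =
            new HapiFhirJpaMigrationTasks(Collections.emptySet()).getAllTasks(VersionEnum.values());

        // HapiMigrator replaces FlywayMigrator: same migration table, plain JDBC underneath.
        HapiMigrator migrator = new HapiMigrator(
            SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME,
            connectionProperties.getDataSource(),
            DriverTypeEnum.H2_EMBEDDED);
        migrator.createMigrationTableIfRequired();
        migrator.addTasks(taskList);

        // Runs only the tasks whose versions are not already recorded as successful.
        MigrationResult result = migrator.migrate();
        System.out.println(result.summary());
    }
}
```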

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,8 @@ import org.junit.jupiter.api.Test;
import java.util.Map;
import static org.junit.jupiter.api.Assertions.*;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
class HistorySearchDateRangeParamTest {
private final int theOffset = 100;

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -3,14 +3,14 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -239,14 +239,6 @@
<groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-sqlserver</artifactId>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-mysql</artifactId>
</dependency>
<!-- Test Deps -->
<dependency>

View File

@ -21,12 +21,8 @@ package ca.uhn.fhir.cli;
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.BaseMigrator;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.FlywayMigrator;
import ca.uhn.fhir.jpa.migrate.MigrationTaskSkipper;
import ca.uhn.fhir.jpa.migrate.TaskOnlyMigrator;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.HapiMigrator;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
@ -35,7 +31,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
@ -50,7 +45,6 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
public static final String MIGRATE_DATABASE = "migrate-database";
public static final String NO_COLUMN_SHRINK = "no-column-shrink";
public static final String DONT_USE_FLYWAY = "dont-use-flyway";
public static final String STRICT_ORDER = "strict-order";
public static final String SKIP_VERSIONS = "skip-versions";
private Set<String> myFlags;
@ -84,8 +78,6 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
addRequiredOption(retVal, "p", "password", "Password", "The JDBC database password");
addRequiredOption(retVal, "d", "driver", "Driver", "The database driver to use (Options are " + driverOptions() + ")");
addOptionalOption(retVal, "x", "flags", "Flags", "A comma-separated list of any specific migration flags (these flags are version specific, see migrator documentation for details)");
addOptionalOption(retVal, null, DONT_USE_FLYWAY, false, "If this option is set, the migrator will not use FlywayDB for migration. This setting should only be used if you are trying to migrate a legacy database platform that is not supported by FlywayDB.");
addOptionalOption(retVal, null, STRICT_ORDER, false, "If this option is set, the migrator will require migration tasks to be performed in order.");
addOptionalOption(retVal, null, NO_COLUMN_SHRINK, false, "If this flag is set, the system will not attempt to reduce the length of columns. This is useful in environments with a lot of existing data, where shrinking a column can take a very long time.");
addOptionalOption(retVal, null, SKIP_VERSIONS, "Versions", "A comma separated list of schema versions to skip. E.g. 4_1_0.20191214.2,4_1_0.20191214.4");
@ -119,36 +111,20 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
.filter(StringUtils::isNotBlank)
.collect(Collectors.toSet());
boolean dontUseFlyway = theCommandLine.hasOption(BaseFlywayMigrateDatabaseCommand.DONT_USE_FLYWAY);
boolean strictOrder = theCommandLine.hasOption(BaseFlywayMigrateDatabaseCommand.STRICT_ORDER);
BaseMigrator migrator;
if (dontUseFlyway || dryRun) {
// Flyway dryrun is not available in community edition
migrator = new TaskOnlyMigrator();
} else {
migrator = new FlywayMigrator(myMigrationTableName);
}
DriverTypeEnum.ConnectionProperties connectionProperties = driverType.newConnectionProperties(url, username, password);
HapiMigrator migrator = new HapiMigrator(myMigrationTableName, connectionProperties.getDataSource(), driverType);
migrator.setDataSource(connectionProperties.getDataSource());
migrator.setDriverType(driverType);
migrator.createMigrationTableIfRequired();
migrator.setDryRun(dryRun);
migrator.setNoColumnShrink(noColumnShrink);
migrator.setStrictOrder(strictOrder);
String skipVersions = theCommandLine.getOptionValue(BaseFlywayMigrateDatabaseCommand.SKIP_VERSIONS);
addTasks(migrator, skipVersions);
migrator.migrate();
}
protected abstract void addTasks(BaseMigrator theMigrator, String theSkippedVersions);
protected abstract void addTasks(HapiMigrator theMigrator, String theSkippedVersions);
public void setMigrationTableName(String theMigrationTableName) {
myMigrationTableName = theMigrationTableName;
}
protected void setDoNothingOnSkippedTasks(Collection<BaseTask> theTasks, String theSkipVersions) {
MigrationTaskSkipper.setDoNothingOnSkippedTasks(theTasks, theSkipVersions);
}
}

View File

@ -20,9 +20,9 @@ package ca.uhn.fhir.cli;
* #L%
*/
import ca.uhn.fhir.jpa.migrate.BaseMigrator;
import ca.uhn.fhir.jpa.migrate.HapiMigrator;
import ca.uhn.fhir.jpa.migrate.MigrationTaskList;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;
import org.apache.commons.cli.CommandLine;
@ -44,10 +44,10 @@ public class HapiFlywayMigrateDatabaseCommand extends BaseFlywayMigrateDatabaseC
}
@Override
protected void addTasks(BaseMigrator theMigrator, String theSkipVersions) {
List<BaseTask> tasks = new HapiFhirJpaMigrationTasks(getFlags()).getAllTasks(VersionEnum.values());
super.setDoNothingOnSkippedTasks(tasks, theSkipVersions);
theMigrator.addTasks(tasks);
protected void addTasks(HapiMigrator theMigrator, String theSkipVersions) {
MigrationTaskList taskList = new HapiFhirJpaMigrationTasks(getFlags()).getAllTasks(VersionEnum.values());
taskList.setDoNothingOnSkippedTasks(theSkipVersions);
theMigrator.addTasks(taskList);
}
@Override

View File

@ -115,47 +115,6 @@ public class HapiFlywayMigrateDatabaseCommandTest {
});
}
@Test
public void testMigrateFrom340_NoFlyway() throws IOException, SQLException {
File location = getLocation("migrator_h2_test_340_current_noflyway");
String url = "jdbc:h2:" + location.getAbsolutePath();
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
String initSql = "/persistence_create_h2_340.sql";
executeSqlStatements(connectionProperties, initSql);
seedDatabase340(connectionProperties);
ourLog.info("**********************************************");
ourLog.info("Done Setup, Starting Migration...");
ourLog.info("**********************************************");
String[] args = new String[]{
BaseFlywayMigrateDatabaseCommand.MIGRATE_DATABASE,
"-d", "H2_EMBEDDED",
"-u", url,
"-n", "",
"-p", "",
"--" + BaseFlywayMigrateDatabaseCommand.DONT_USE_FLYWAY
};
assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RES_REINDEX_JOB"));
App.main(args);
assertTrue(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RES_REINDEX_JOB"));
connectionProperties.getTxTemplate().execute(t -> {
JdbcTemplate jdbcTemplate = connectionProperties.newJdbcTemplate();
List<Map<String, Object>> values = jdbcTemplate.queryForList("SELECT * FROM hfj_spidx_token");
assertEquals(1, values.size());
assertEquals("identifier", values.get(0).get("SP_NAME"));
assertEquals("12345678", values.get(0).get("SP_VALUE"));
assertTrue(values.get(0).keySet().contains("HASH_IDENTITY"));
assertEquals(7001889285610424179L, values.get(0).get("HASH_IDENTITY"));
return null;
});
}
@Test
public void testMigrateFrom340_dryRun() throws IOException, SQLException {
@ -252,34 +211,6 @@ public class HapiFlywayMigrateDatabaseCommandTest {
assertTrue(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BLK_EXPORT_JOB")); // Late table
}
@Test
public void testMigrateFromEmptySchema_NoFlyway() throws IOException, SQLException {
File location = getLocation("migrator_h2_test_empty_current_noflyway");
String url = "jdbc:h2:" + location.getAbsolutePath();
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
ourLog.info("**********************************************");
ourLog.info("Starting Migration...");
ourLog.info("**********************************************");
String[] args = new String[]{
BaseFlywayMigrateDatabaseCommand.MIGRATE_DATABASE,
"-d", "H2_EMBEDDED",
"-u", url,
"-n", "",
"-p", "",
"--" + BaseFlywayMigrateDatabaseCommand.DONT_USE_FLYWAY
};
assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RESOURCE"));
assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BLK_EXPORT_JOB"));
App.main(args);
assertTrue(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RESOURCE")); // Early table
assertTrue(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BLK_EXPORT_JOB")); // Late table
}
@Nonnull
private File getLocation(String theDatabaseName) throws IOException {
File directory = new File(DB_DIRECTORY);

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -0,0 +1,7 @@
---
type: change
issue: 4040
title: "Removed Flyway database migration engine. The migration table still tracks successful and failed migrations
to determine which migrations need to be run at startup. Database migrations no longer need to run differently when
using an older database version."

View File

@ -68,6 +68,8 @@ SELECT * FROM HFJ_RES_REINDEX_JOB
```
* Rebuild, and start HAPI FHIR JPA again.
# Flyway
# Database Migration
As of version 4.2.0, HAPI FHIR JPA now uses Flyway for schema migrations. The "from" and "to" parameters are no longer used. Flyway maintains a list of completed migrations in a table called `FLY_HFJ_MIGRATION`. When you run the migration command, flyway scans the list of completed migrations in this table and compares them to the list of known migrations, and runs only the new ones.
As of version 6.2.0, HAPI FHIR JPA no longer relies on Flyway for schema migrations. HAPI FHIR still maintains a list of completed migrations in a table called `FLY_HFJ_MIGRATION`. When you run the migration command, HAPI FHIR scans the list of completed migrations in this table and compares them to the list of known migrations and executes only the new ones.
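
For illustration (this block is not part of the documentation diff), the migration table can be inspected directly with Spring's JdbcTemplate, as this PR's tests do; quoting the table name follows the Postgres note in the commit log above, and the exact column set is an assumption:

```java
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import org.springframework.jdbc.core.JdbcTemplate;

import java.util.List;
import java.util.Map;

public class MigrationTableInspector {
    // Prints every recorded migration attempt; failed rows remain in the table.
    public static void printMigrations(DriverTypeEnum.ConnectionProperties theConnectionProperties) {
        JdbcTemplate jdbcTemplate = theConnectionProperties.newJdbcTemplate();
        List<Map<String, Object>> rows =
            jdbcTemplate.queryForList("SELECT * FROM \"FLY_HFJ_MIGRATION\"");
        rows.forEach(row -> System.out.println(row));
    }
}
```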

View File

@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -22,8 +22,8 @@ package ca.uhn.fhir.jpa.term.loinc;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
import ca.uhn.fhir.jpa.term.IZipContentsHandlerCsv;
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.apache.commons.csv.CSVRecord;
import org.hl7.fhir.r4.model.CodeSystem;

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -33,6 +33,10 @@
<groupId>com.sun.mail</groupId>
<artifactId>javax.mail</artifactId>
</exclusion>
<exclusion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-client</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@ -59,6 +63,18 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-client</artifactId>
</exclusion>
<exclusion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-dstu2</artifactId>
</exclusion>
<exclusion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-hl7org-dstu2</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -2,11 +2,9 @@ package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -1,7 +1,7 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.FlywayMigrator;
import ca.uhn.fhir.jpa.migrate.HapiMigrator;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import org.apache.commons.dbcp2.BasicDataSource;
@ -27,7 +27,7 @@ public abstract class BaseTest {
private static final Logger ourLog = LoggerFactory.getLogger(BaseTest.class);
private BasicDataSource myDataSource;
private String myUrl;
private FlywayMigrator myMigrator;
private HapiMigrator myMigrator;
private DriverTypeEnum.ConnectionProperties myConnectionProperties;
public static Stream<Supplier<TestDatabaseDetails>> data() {
@ -46,7 +46,7 @@ public abstract class BaseTest {
dataSource.setUsername("SA");
dataSource.setPassword("SA");
dataSource.setDriverClassName(DriverTypeEnum.H2_EMBEDDED.getDriverClassName());
FlywayMigrator migrator = new FlywayMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.H2_EMBEDDED);
HapiMigrator migrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.H2_EMBEDDED);
return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
}
@ -67,7 +67,7 @@ public abstract class BaseTest {
dataSource.setUsername("SA");
dataSource.setPassword("SA");
dataSource.setDriverClassName(DriverTypeEnum.DERBY_EMBEDDED.getDriverClassName());
FlywayMigrator migrator = new FlywayMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.DERBY_EMBEDDED);
HapiMigrator migrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.DERBY_EMBEDDED);
return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
}
@ -86,6 +86,7 @@ public abstract class BaseTest {
myConnectionProperties = testDatabaseDetails.myConnectionProperties;
myDataSource = testDatabaseDetails.myDataSource;
myMigrator = testDatabaseDetails.myMigrator;
myMigrator.createMigrationTableIfRequired();
}
public String getUrl() {
@ -123,7 +124,7 @@ public abstract class BaseTest {
});
}
public FlywayMigrator getMigrator() {
public HapiMigrator getMigrator() {
return myMigrator;
}
@ -143,9 +144,9 @@ public abstract class BaseTest {
private final String myUrl;
private final DriverTypeEnum.ConnectionProperties myConnectionProperties;
private final BasicDataSource myDataSource;
private final FlywayMigrator myMigrator;
private final HapiMigrator myMigrator;
public TestDatabaseDetails(String theUrl, DriverTypeEnum.ConnectionProperties theConnectionProperties, BasicDataSource theDataSource, FlywayMigrator theMigrator) {
public TestDatabaseDetails(String theUrl, DriverTypeEnum.ConnectionProperties theConnectionProperties, BasicDataSource theDataSource, HapiMigrator theMigrator) {
myUrl = theUrl;
myConnectionProperties = theConnectionProperties;
myDataSource = theDataSource;

View File

@ -1,12 +1,12 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.MigrationTaskList;
import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;
import org.junit.jupiter.api.Test;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.jupiter.api.Assertions.assertEquals;
@ -33,19 +33,20 @@ public class HashTest {
@Test
public void testCheckAllHashes() {
List<BaseTask> tasks1 = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getAllTasks(VersionEnum.values());
MigrationTaskList tasks1 = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getAllTasks(VersionEnum.values());
Map<String, Integer> hashesByVersion = new HashMap<>();
for (BaseTask task : tasks1) {
String version = task.getFlywayVersion();
tasks1.forEach(task -> {
String version = task.getMigrationVersion();
assertNull(hashesByVersion.get(version), "Duplicate flyway version " + version + " in " + HapiFhirJpaMigrationTasks.class.getName());
hashesByVersion.put(version, task.hashCode());
}
});
List<BaseTask> tasks2 = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getAllTasks(VersionEnum.values());
for (BaseTask task : tasks2) {
String version = task.getFlywayVersion();
MigrationTaskList tasks2 = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getAllTasks(VersionEnum.values());
tasks2.forEach(task -> {
String version = task.getMigrationVersion();
int origHash = hashesByVersion.get(version);
assertEquals(origHash, task.hashCode(), "Hashes differ for task " + version);
}
});
}
}

View File

@ -32,7 +32,6 @@ import org.hibernate.dialect.PostgreSQL10Dialect;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.ValueSet;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-sample-client-okhttp</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-sample-server-jersey</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -72,18 +72,19 @@
<groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId>
</dependency>
<!-- SQL Builder -->
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-sqlserver</artifactId>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-mysql</artifactId>
<groupId>com.healthmarketscience.sqlbuilder</groupId>
<artifactId>sqlbuilder</artifactId>
</dependency>
<!-- test -->
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-commons</artifactId>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -1,124 +0,0 @@
package ca.uhn.fhir.jpa.migrate;
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.apache.commons.lang3.Validate;
import org.flywaydb.core.api.callback.Callback;
import javax.annotation.Nonnull;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
public abstract class BaseMigrator implements IMigrator {
private final List<BaseTask.ExecutedStatement> myExecutedStatements = new ArrayList<>();
private boolean myDryRun;
private boolean myNoColumnShrink;
private boolean mySchemaWasInitialized;
private DriverTypeEnum myDriverType;
private DataSource myDataSource;
private boolean myStrictOrder;
private List<Callback> myCallbacks = Collections.emptyList();
@Nonnull
public List<Callback> getCallbacks() {
return myCallbacks;
}
public void setCallbacks(@Nonnull List<Callback> theCallbacks) {
Validate.notNull(theCallbacks);
myCallbacks = theCallbacks;
}
public DataSource getDataSource() {
return myDataSource;
}
public void setDataSource(DataSource theDataSource) {
myDataSource = theDataSource;
}
public boolean isDryRun() {
return myDryRun;
}
public void setDryRun(boolean theDryRun) {
myDryRun = theDryRun;
}
public boolean isNoColumnShrink() {
return myNoColumnShrink;
}
public void setNoColumnShrink(boolean theNoColumnShrink) {
myNoColumnShrink = theNoColumnShrink;
}
public DriverTypeEnum getDriverType() {
return myDriverType;
}
public void setDriverType(DriverTypeEnum theDriverType) {
myDriverType = theDriverType;
}
public boolean isStrictOrder() {
return myStrictOrder;
}
public void setStrictOrder(boolean theStrictOrder) {
myStrictOrder = theStrictOrder;
}
public void addExecutedStatements(List theExecutedStatements) {
myExecutedStatements.addAll(theExecutedStatements);
}
protected StringBuilder buildExecutedStatementsString() {
StringBuilder statementBuilder = new StringBuilder();
String lastTable = null;
for (BaseTask.ExecutedStatement next : myExecutedStatements) {
if (!Objects.equals(lastTable, next.getTableName())) {
statementBuilder.append("\n\n-- Table: ").append(next.getTableName()).append("\n");
lastTable = next.getTableName();
}
statementBuilder.append(next.getSql()).append(";\n");
for (Object nextArg : next.getArguments()) {
statementBuilder.append(" -- Arg: ").append(nextArg).append("\n");
}
}
return statementBuilder;
}
public boolean isSchemaWasInitialized() {
return mySchemaWasInitialized;
}
public BaseMigrator setSchemaWasInitialized(boolean theSchemaWasInitialized) {
mySchemaWasInitialized = theSchemaWasInitialized;
return this;
}
}

View File

@ -1,113 +0,0 @@
package ca.uhn.fhir.jpa.migrate;
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.flywaydb.core.api.MigrationVersion;
import org.flywaydb.core.api.migration.Context;
import org.flywaydb.core.api.migration.JavaMigration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.SQLException;
import static org.apache.commons.lang3.StringUtils.isBlank;
public class FlywayMigrationTask implements JavaMigration {
private static final Logger ourLog = LoggerFactory.getLogger(FlywayMigrationTask.class);
private final BaseTask myTask;
private final FlywayMigrator myFlywayMigrator;
private DriverTypeEnum.ConnectionProperties myConnectionProperties;
public FlywayMigrationTask(BaseTask theTask, FlywayMigrator theFlywayMigrator) {
myTask = theTask;
myFlywayMigrator = theFlywayMigrator;
}
@Override
public MigrationVersion getVersion() {
return MigrationVersion.fromVersion(myTask.getFlywayVersion());
}
@Override
public String getDescription() {
return myTask.getDescription();
}
@Override
public Integer getChecksum() {
return myTask.hashCode();
}
@Override
public boolean isUndo() {
return false;
}
@Override
public boolean isBaselineMigration() {
return false;
}
@Override
public boolean canExecuteInTransaction() {
return false;
}
@Override
public void migrate(Context theContext) {
myTask.setDriverType(myFlywayMigrator.getDriverType());
myTask.setDryRun(myFlywayMigrator.isDryRun());
myTask.setNoColumnShrink(myFlywayMigrator.isNoColumnShrink());
myTask.setConnectionProperties(myConnectionProperties);
try {
executeTask();
} catch (SQLException e) {
String description = myTask.getDescription();
if (isBlank(description)) {
description = myTask.getClass().getSimpleName();
}
String prefix = "Failure executing task \"" + description + "\", aborting! Cause: ";
throw new InternalErrorException(Msg.code(47) + prefix + e.toString(), e);
}
}
private void executeTask() throws SQLException {
if (myFlywayMigrator.isSchemaWasInitialized() && !myTask.isRunDuringSchemaInitialization()) {
// Empty schema was initialized, stub out this non-schema-init task since we're starting with a fully migrated schema
myTask.setDoNothing(true);
}
myTask.execute();
if (myTask.initializedSchema()) {
ourLog.info("Empty schema was Initialized. Stubbing out all following migration tasks that are not Schema Initializations.");
myFlywayMigrator.setSchemaWasInitialized(true);
}
myFlywayMigrator.addExecutedStatements(myTask.getExecutedStatements());
}
public void setConnectionProperties(DriverTypeEnum.ConnectionProperties theConnectionProperties) {
myConnectionProperties = theConnectionProperties;
}
}

View File

@ -1,114 +0,0 @@
package ca.uhn.fhir.jpa.migrate;
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.InitializeSchemaTask;
import com.google.common.annotations.VisibleForTesting;
import org.flywaydb.core.Flyway;
import org.flywaydb.core.api.MigrationInfoService;
import org.flywaydb.core.api.callback.Callback;
import org.flywaydb.core.api.migration.JavaMigration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
public class FlywayMigrator extends BaseMigrator {
private static final Logger ourLog = LoggerFactory.getLogger(FlywayMigrator.class);
private final String myMigrationTableName;
private final List<FlywayMigrationTask> myTasks = new ArrayList<>();
public FlywayMigrator(String theMigrationTableName, DataSource theDataSource, DriverTypeEnum theDriverType) {
this(theMigrationTableName);
setDataSource(theDataSource);
setDriverType(theDriverType);
}
public FlywayMigrator(String theMigrationTableName) {
myMigrationTableName = theMigrationTableName;
}
public void addTask(BaseTask theTask) {
myTasks.add(new FlywayMigrationTask(theTask, this));
}
@Override
public void migrate() {
try (DriverTypeEnum.ConnectionProperties connectionProperties = getDriverType().newConnectionProperties(getDataSource())) {
Flyway flyway = initFlyway(connectionProperties);
flyway.repair();
flyway.migrate();
if (isDryRun()) {
StringBuilder statementBuilder = buildExecutedStatementsString();
ourLog.info("SQL that would be executed:\n\n***********************************\n{}***********************************", statementBuilder);
}
} catch (Exception e) {
throw e;
}
}
private Flyway initFlyway(DriverTypeEnum.ConnectionProperties theConnectionProperties) {
Flyway flyway = Flyway.configure()
.table(myMigrationTableName)
.dataSource(theConnectionProperties.getDataSource())
.baselineOnMigrate(true)
// By default, migrations are allowed to be run out of order. You can enforce strict order by setting strictOrder=true.
.outOfOrder(!isStrictOrder())
.javaMigrations(myTasks.toArray(new JavaMigration[0]))
.callbacks(getCallbacks().toArray(new Callback[0]))
.load();
for (FlywayMigrationTask task : myTasks) {
task.setConnectionProperties(theConnectionProperties);
}
return flyway;
}
@Override
public void addTasks(List<BaseTask> theTasks) {
if ("true".equals(System.getProperty("unit_test_mode"))) {
theTasks.stream().filter(task -> task instanceof InitializeSchemaTask).forEach(this::addTask);
} else {
theTasks.forEach(this::addTask);
}
}
@Override
public Optional<MigrationInfoService> getMigrationInfo() {
if (getDriverType() == null) {
return Optional.empty();
}
try (DriverTypeEnum.ConnectionProperties connectionProperties = getDriverType().newConnectionProperties(getDataSource())) {
Flyway flyway = initFlyway(connectionProperties);
return Optional.of(flyway.info());
}
}
@VisibleForTesting
public void removeAllTasksForUnitTest() {
myTasks.clear();
}
}

View File

@ -0,0 +1,21 @@
package ca.uhn.fhir.jpa.migrate;
public class HapiMigrationException extends RuntimeException {
private MigrationResult myResult;
public HapiMigrationException(String theMessage) {
super(theMessage);
}
public HapiMigrationException(String theMessage, Exception theException) {
super(theMessage, theException);
}
public HapiMigrationException(String theMessage, MigrationResult theResult, Exception theException) {
super(theMessage, theException);
myResult = theResult;
}
public MigrationResult getResult() {
return myResult;
}
}

View File

@ -0,0 +1,67 @@
package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.flywaydb.core.api.MigrationVersion;
import java.util.Set;
public class HapiMigrationStorageSvc {
public static final String UNKNOWN_VERSION = "unknown";
private final HapiMigrationDao myHapiMigrationDao;
public HapiMigrationStorageSvc(HapiMigrationDao theHapiMigrationDao) {
myHapiMigrationDao = theHapiMigrationDao;
}
/**
* Returns a list of migration tasks that have not yet been successfully run against the database
* @param theTaskList the full list of tasks for this release
* @return a list of tasks that have not yet been successfully run against the database
*/
public MigrationTaskList diff(MigrationTaskList theTaskList) {
Set<MigrationVersion> appliedMigrationVersions = fetchAppliedMigrationVersions();
return theTaskList.diff(appliedMigrationVersions);
}
/**
*
* @return a list of migration versions that have been successfully run against the database
*/
Set<MigrationVersion> fetchAppliedMigrationVersions() {
return myHapiMigrationDao.fetchSuccessfulMigrationVersions();
}
/**
*
* @return the most recent version that was run against the database (used for logging purposes)
*/
public String getLatestAppliedVersion() {
return fetchAppliedMigrationVersions().stream()
.sorted()
.map(MigrationVersion::toString)
.reduce((first, second) -> second)
.orElse(UNKNOWN_VERSION);
}
/**
* Save a migration task to the database
*/
public void saveTask(BaseTask theBaseTask, Integer theMillis, boolean theSuccess) {
HapiMigrationEntity entity = HapiMigrationEntity.fromBaseTask(theBaseTask);
entity.setExecutionTime(theMillis);
entity.setSuccess(theSuccess);
myHapiMigrationDao.save(entity);
}
/**
* Create the migration table if it does not already exist
*/
public void createMigrationTableIfRequired() {
myHapiMigrationDao.createMigrationTableIfRequired();
}
}

View File

@ -0,0 +1,198 @@
package ca.uhn.fhir.jpa.migrate;
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.InitializeSchemaTask;
import ca.uhn.fhir.util.StopWatch;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import static org.apache.commons.lang3.StringUtils.isBlank;
public class HapiMigrator {
private static final Logger ourLog = LoggerFactory.getLogger(HapiMigrator.class);
private MigrationTaskList myTaskList = new MigrationTaskList();
private boolean myDryRun;
private boolean myNoColumnShrink;
private final DriverTypeEnum myDriverType;
private final DataSource myDataSource;
private final HapiMigrationStorageSvc myHapiMigrationStorageSvc;
private List<IHapiMigrationCallback> myCallbacks = Collections.emptyList();
public HapiMigrator(String theMigrationTableName, DataSource theDataSource, DriverTypeEnum theDriverType) {
myDriverType = theDriverType;
myDataSource = theDataSource;
myHapiMigrationStorageSvc = new HapiMigrationStorageSvc(new HapiMigrationDao(theDataSource, theDriverType, theMigrationTableName));
}
public DataSource getDataSource() {
return myDataSource;
}
public boolean isDryRun() {
return myDryRun;
}
public void setDryRun(boolean theDryRun) {
myDryRun = theDryRun;
}
public boolean isNoColumnShrink() {
return myNoColumnShrink;
}
public void setNoColumnShrink(boolean theNoColumnShrink) {
myNoColumnShrink = theNoColumnShrink;
}
public DriverTypeEnum getDriverType() {
return myDriverType;
}
protected StringBuilder buildExecutedStatementsString(MigrationResult theMigrationResult) {
StringBuilder statementBuilder = new StringBuilder();
String lastTable = null;
for (BaseTask.ExecutedStatement next : theMigrationResult.executedStatements) {
if (!Objects.equals(lastTable, next.getTableName())) {
statementBuilder.append("\n\n-- Table: ").append(next.getTableName()).append("\n");
lastTable = next.getTableName();
}
statementBuilder.append(next.getSql()).append(";\n");
for (Object nextArg : next.getArguments()) {
statementBuilder.append(" -- Arg: ").append(nextArg).append("\n");
}
}
return statementBuilder;
}
public MigrationResult migrate() {
ourLog.info("Loaded {} migration tasks", myTaskList.size());
MigrationTaskList newTaskList = myHapiMigrationStorageSvc.diff(myTaskList);
ourLog.info("{} of these {} migration tasks are new. Executing them now.", newTaskList.size(), myTaskList.size());
MigrationResult retval = new MigrationResult();
try (DriverTypeEnum.ConnectionProperties connectionProperties = getDriverType().newConnectionProperties(getDataSource())) {
newTaskList.forEach(next -> {
next.setDriverType(getDriverType());
next.setDryRun(isDryRun());
next.setNoColumnShrink(isNoColumnShrink());
next.setConnectionProperties(connectionProperties);
StopWatch sw = new StopWatch();
try {
if (isDryRun()) {
ourLog.info("Dry run {} {}", next.getMigrationVersion(), next.getDescription());
} else {
ourLog.info("Executing {} {}", next.getMigrationVersion(), next.getDescription());
}
preExecute(next);
next.execute();
postExecute(next, sw, true);
retval.changes += next.getChangesCount();
retval.executedStatements.addAll(next.getExecutedStatements());
retval.succeededTasks.add(next);
} catch (SQLException|HapiMigrationException e) {
retval.failedTasks.add(next);
postExecute(next, sw, false);
String description = next.getDescription();
if (isBlank(description)) {
description = next.getClass().getSimpleName();
}
String prefix = "Failure executing task \"" + description + "\", aborting! Cause: ";
throw new HapiMigrationException(Msg.code(47) + prefix + e, retval, e);
}
});
}
ourLog.info(retval.summary());
if (isDryRun()) {
StringBuilder statementBuilder = buildExecutedStatementsString(retval);
ourLog.info("SQL that would be executed:\n\n***********************************\n{}***********************************", statementBuilder);
}
return retval;
}
private void preExecute(BaseTask theTask) {
myCallbacks.forEach(action -> action.preExecution(theTask));
}
private void postExecute(BaseTask theNext, StopWatch theStopWatch, boolean theSuccess) {
myHapiMigrationStorageSvc.saveTask(theNext, Math.toIntExact(theStopWatch.getMillis()), theSuccess);
}
public void addTasks(Iterable<BaseTask> theMigrationTasks) {
if ("true".equals(System.getProperty("unit_test_mode"))) {
// Tests only need to initialize the schemas. No need to run all the migrations for every test.
for (BaseTask task : theMigrationTasks) {
if (task instanceof InitializeSchemaTask) {
addTask(task);
}
}
} else {
myTaskList.append(theMigrationTasks);
}
}
public void addTask(BaseTask theTask) {
myTaskList.add(theTask);
}
@Nonnull
public List<IHapiMigrationCallback> getCallbacks() {
return myCallbacks;
}
public void setCallbacks(@Nonnull List<IHapiMigrationCallback> theCallbacks) {
Validate.notNull(theCallbacks);
myCallbacks = theCallbacks;
}
@VisibleForTesting
public void removeAllTasksForUnitTest() {
myTaskList.clear();
}
public void createMigrationTableIfRequired() {
myHapiMigrationStorageSvc.createMigrationTableIfRequired();
}
}

View File

@ -0,0 +1,8 @@
package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
public interface IHapiMigrationCallback {
default void preExecution(BaseTask theTask) {}
default void postExecution(BaseTask theTask) {}
}
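
As an illustrative sketch (not part of the diff), a callback implementing this interface could log each task before it runs; the class name here is hypothetical:

```java
import ca.uhn.fhir.jpa.migrate.IHapiMigrationCallback;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical callback that logs each migration task before it executes.
public class LoggingMigrationCallback implements IHapiMigrationCallback {
    private static final Logger ourLog = LoggerFactory.getLogger(LoggingMigrationCallback.class);

    @Override
    public void preExecution(BaseTask theTask) {
        ourLog.info("About to execute migration {}", theTask.getMigrationVersion());
    }
}
```

It would be registered with migrator.setCallbacks(Collections.singletonList(new LoggingMigrationCallback())) before calling migrate(); note that in the HapiMigrator code shown in this PR, only preExecution is invoked.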

View File

@ -1,36 +0,0 @@
package ca.uhn.fhir.jpa.migrate;
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.flywaydb.core.api.MigrationInfoService;
import java.util.List;
import java.util.Optional;
public interface IMigrator {
void migrate();
Optional<MigrationInfoService> getMigrationInfo();
void addTasks(List<BaseTask> theMigrationTasks);
}

View File

@ -39,7 +39,6 @@ import org.hibernate.engine.jdbc.env.spi.LobCreatorBuilder;
import org.hibernate.engine.jdbc.env.spi.NameQualifierSupport;
import org.hibernate.engine.jdbc.env.spi.QualifiedObjectNameFormatter;
import org.hibernate.engine.jdbc.spi.SqlExceptionHelper;
import org.hibernate.engine.jdbc.spi.TypeInfo;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.extract.spi.ExtractionContext;
import org.hibernate.tool.schema.extract.spi.SequenceInformation;
@ -418,11 +417,6 @@ public class JdbcUtils {
public LobCreatorBuilder getLobCreatorBuilder() {
return null;
}
@Override
public TypeInfo getTypeInfoForJdbcCode(int jdbcTypeCode) {
return null;
}
};
}
};

View File

@ -0,0 +1,21 @@
package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import java.util.ArrayList;
import java.util.List;
public class MigrationResult {
public int changes = 0;
public final List<BaseTask.ExecutedStatement> executedStatements = new ArrayList<>();
public final List<BaseTask> succeededTasks = new ArrayList<>();
public final List<BaseTask> failedTasks = new ArrayList<>();
public String summary() {
return String.format("Completed executing %s migration tasks: %s succeeded, %s failed. %s SQL statements were executed.",
succeededTasks.size() + failedTasks.size(),
succeededTasks.size(),
failedTasks.size(),
executedStatements.size());
}
}

View File

@ -0,0 +1,78 @@
package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.flywaydb.core.api.MigrationVersion;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.function.Consumer;
import java.util.stream.Collectors;
public class MigrationTaskList implements Iterable<BaseTask> {
private final List<BaseTask> myTasks;
public MigrationTaskList() {
myTasks = new ArrayList<>();
}
public MigrationTaskList(List<BaseTask> theTasks) {
myTasks = theTasks;
}
public void addAll(Collection<BaseTask> theTasks) {
myTasks.addAll(theTasks);
}
public void setDoNothingOnSkippedTasks(String theSkipVersions) {
MigrationTaskSkipper.setDoNothingOnSkippedTasks(myTasks, theSkipVersions);
}
public int size() {
return myTasks.size();
}
public MigrationTaskList diff(Set<MigrationVersion> theAppliedMigrationVersions) {
List<BaseTask> unappliedTasks = myTasks.stream()
.filter(task -> !theAppliedMigrationVersions.contains(MigrationVersion.fromVersion(task.getMigrationVersion())))
.collect(Collectors.toList());
return new MigrationTaskList(unappliedTasks);
}
public void append(Iterable<BaseTask> theMigrationTasks) {
for (BaseTask next : theMigrationTasks) {
myTasks.add(next);
}
}
public void add(BaseTask theTask) {
myTasks.add(theTask);
}
public void clear() {
myTasks.clear();
}
@Nonnull
@Override
public Iterator<BaseTask> iterator() {
return myTasks.iterator();
}
public void forEach(Consumer<? super BaseTask> theAction) {
myTasks.forEach(theAction);
}
public String getLastVersion() {
return myTasks.stream()
.map(BaseTask::getMigrationVersion)
.map(MigrationVersion::fromVersion)
.sorted()
.map(MigrationVersion::toString)
.reduce((first, second) -> second)
.orElse(null);
}
}

View File

@ -48,8 +48,8 @@ public class MigrationTaskSkipper {
.collect(Collectors.toSet());
for (BaseTask task : theTasks) {
if (skippedVersionSet.contains(task.getFlywayVersion())) {
ourLog.info("Will skip {}: {}", task.getFlywayVersion(), task.getDescription());
if (skippedVersionSet.contains(task.getMigrationVersion())) {
ourLog.info("Will skip {}: {}", task.getMigrationVersion(), task.getDescription());
task.setDoNothing(true);
}
}

View File

@ -1,138 +0,0 @@
package ca.uhn.fhir.jpa.migrate;
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import static org.apache.commons.lang3.StringUtils.isBlank;
public class Migrator {
private static final Logger ourLog = LoggerFactory.getLogger(Migrator.class);
private DriverTypeEnum myDriverType;
private String myConnectionUrl;
private String myUsername;
private String myPassword;
private List<BaseTask> myTasks = new ArrayList<>();
private DriverTypeEnum.ConnectionProperties myConnectionProperties;
private int myChangesCount;
private boolean myDryRun;
private List<BaseTask.ExecutedStatement> myExecutedStatements = new ArrayList<>();
private boolean myNoColumnShrink;
public int getChangesCount() {
return myChangesCount;
}
public void setDriverType(DriverTypeEnum theDriverType) {
myDriverType = theDriverType;
}
public void setConnectionUrl(String theConnectionUrl) {
myConnectionUrl = theConnectionUrl;
}
public void setUsername(String theUsername) {
myUsername = theUsername;
}
public void setPassword(String thePassword) {
myPassword = thePassword;
}
public void addTask(BaseTask theTask) {
myTasks.add(theTask);
}
public void setDryRun(boolean theDryRun) {
myDryRun = theDryRun;
}
public void migrate() {
ourLog.info("Starting migration with {} tasks", myTasks.size());
myConnectionProperties = myDriverType.newConnectionProperties(myConnectionUrl, myUsername, myPassword);
try {
for (BaseTask next : myTasks) {
next.setDriverType(myDriverType);
next.setConnectionProperties(myConnectionProperties);
next.setDryRun(myDryRun);
next.setNoColumnShrink(myNoColumnShrink);
try {
next.execute();
} catch (SQLException e) {
String description = next.getDescription();
if (isBlank(description)) {
description = next.getClass().getSimpleName();
}
String prefix = "Failure executing task \"" + description + "\", aborting! Cause: ";
throw new InternalErrorException(Msg.code(44) + prefix + e.toString(), e);
}
myChangesCount += next.getChangesCount();
myExecutedStatements.addAll(next.getExecutedStatements());
}
} finally {
myConnectionProperties.close();
}
ourLog.info("Finished migration of {} tasks", myTasks.size());
if (myDryRun) {
StringBuilder statementBuilder = new StringBuilder();
String lastTable = null;
for (BaseTask.ExecutedStatement next : myExecutedStatements) {
if (!Objects.equals(lastTable, next.getTableName())) {
statementBuilder.append("\n\n-- Table: ").append(next.getTableName()).append("\n");
lastTable = next.getTableName();
}
statementBuilder.append(next.getSql()).append(";\n");
for (Object nextArg : next.getArguments()) {
statementBuilder.append(" -- Arg: ").append(nextArg).append("\n");
}
}
ourLog.info("SQL that would be executed:\n\n***********************************\n{}***********************************", statementBuilder);
}
}
public void addTasks(List<BaseTask> theTasks) {
theTasks.forEach(this::addTask);
}
public void setNoColumnShrink(boolean theNoColumnShrink) {
myNoColumnShrink = theNoColumnShrink;
}
}

View File

@ -20,23 +20,17 @@ package ca.uhn.fhir.jpa.migrate;
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.flywaydb.core.api.MigrationInfo;
import org.flywaydb.core.api.MigrationInfoService;
import org.flywaydb.core.api.callback.Callback;
import ca.uhn.fhir.i18n.Msg;
import org.hibernate.cfg.AvailableSettings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.Assert;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.Properties;
public class SchemaMigrator {
@ -46,35 +40,22 @@ public class SchemaMigrator {
private final DataSource myDataSource;
private final boolean mySkipValidation;
private final String myMigrationTableName;
private final List<BaseTask> myMigrationTasks;
private boolean myDontUseFlyway;
private boolean myStrictOrder;
private final MigrationTaskList myMigrationTasks;
private DriverTypeEnum myDriverType;
private List<Callback> myCallbacks = Collections.emptyList();
private List<IHapiMigrationCallback> myCallbacks = Collections.emptyList();
private final HapiMigrationStorageSvc myHapiMigrationStorageSvc;
/**
* Constructor
*/
public SchemaMigrator(String theSchemaName, String theMigrationTableName, DataSource theDataSource, Properties jpaProperties, List<BaseTask> theMigrationTasks) {
public SchemaMigrator(String theSchemaName, String theMigrationTableName, DataSource theDataSource, Properties jpaProperties, MigrationTaskList theMigrationTasks, HapiMigrationStorageSvc theHapiMigrationStorageSvc) {
mySchemaName = theSchemaName;
myDataSource = theDataSource;
myMigrationTableName = theMigrationTableName;
myMigrationTasks = theMigrationTasks;
mySkipValidation = jpaProperties.containsKey(AvailableSettings.HBM2DDL_AUTO) && "update".equals(jpaProperties.getProperty(AvailableSettings.HBM2DDL_AUTO));
}
public void setCallbacks(List<Callback> theCallbacks) {
Assert.notNull(theCallbacks);
myCallbacks = theCallbacks;
}
public void setDontUseFlyway(boolean theDontUseFlyway) {
myDontUseFlyway = theDontUseFlyway;
}
public void setStrictOrder(boolean theStrictOrder) {
myStrictOrder = theStrictOrder;
myHapiMigrationStorageSvc = theHapiMigrationStorageSvc;
}
public void validate() {
@ -83,69 +64,55 @@ public class SchemaMigrator {
return;
}
try (Connection connection = myDataSource.getConnection()) {
Optional<MigrationInfoService> migrationInfo = newMigrator().getMigrationInfo();
if (migrationInfo.isPresent()) {
if (migrationInfo.get().pending().length > 0) {
MigrationTaskList unappliedMigrations = myHapiMigrationStorageSvc.diff(myMigrationTasks);
String url = connection.getMetaData().getURL();
throw new ConfigurationException(Msg.code(27) + "The database schema for " + url + " is out of date. " +
"Current database schema version is " + getCurrentVersion(migrationInfo.get()) + ". Schema version required by application is " +
getLastVersion(migrationInfo.get()) + ". Please run the database migrator.");
}
ourLog.info("Database schema confirmed at expected version " + getCurrentVersion(migrationInfo.get()));
if (unappliedMigrations.size() > 0) {
String url = connection.getMetaData().getURL();
throw new ConfigurationException(Msg.code(27) + "The database schema for " + url + " is out of date. " +
"Current database schema version is " + myHapiMigrationStorageSvc.getLatestAppliedVersion() + ". Schema version required by application is " +
unappliedMigrations.getLastVersion() + ". Please run the database migrator.");
}
ourLog.info("Database schema confirmed at expected version " + myHapiMigrationStorageSvc.getLatestAppliedVersion());
} catch (SQLException e) {
throw new ConfigurationException(Msg.code(28) + "Unable to connect to " + myDataSource, e);
}
}
public void migrate() {
public MigrationResult migrate() {
if (mySkipValidation) {
ourLog.warn("Database running in hibernate auto-update mode. Skipping schema migration.");
return;
return null;
}
try {
ourLog.info("Migrating " + mySchemaName);
newMigrator().migrate();
ourLog.info(mySchemaName + " migrated successfully.");
MigrationResult retval = newMigrator().migrate();
ourLog.info(mySchemaName + " migrated successfully: {}", retval.summary());
return retval;
} catch (Exception e) {
ourLog.error("Failed to migrate " + mySchemaName, e);
throw e;
}
}
private BaseMigrator newMigrator() {
BaseMigrator migrator;
if (myDontUseFlyway) {
migrator = new TaskOnlyMigrator();
migrator.setDriverType(myDriverType);
migrator.setDataSource(myDataSource);
} else {
migrator = new FlywayMigrator(myMigrationTableName, myDataSource, myDriverType);
migrator.setStrictOrder(myStrictOrder);
}
private HapiMigrator newMigrator() {
HapiMigrator migrator;
migrator = new HapiMigrator(myMigrationTableName, myDataSource, myDriverType);
migrator.addTasks(myMigrationTasks);
migrator.setCallbacks(myCallbacks);
return migrator;
}
private String getCurrentVersion(MigrationInfoService theMigrationInfo) {
MigrationInfo migrationInfo = theMigrationInfo.current();
if (migrationInfo == null) {
return "unknown";
}
return migrationInfo.getVersion().toString();
}
private String getLastVersion(MigrationInfoService theMigrationInfo) {
MigrationInfo[] pending = theMigrationInfo.pending();
if (pending.length > 0) {
return pending[pending.length - 1].getVersion().toString();
}
return "unknown";
}
public void setDriverType(DriverTypeEnum theDriverType) {
myDriverType = theDriverType;
}
public void setCallbacks(List<IHapiMigrationCallback> theCallbacks) {
myCallbacks = theCallbacks;
}
public void createMigrationTableIfRequired() {
myHapiMigrationStorageSvc.createMigrationTableIfRequired();
}
}
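
To make the new wiring concrete, a hedged sketch (not part of this diff; the helper class name and schema label are illustrative) of how the pieces above fit together, using only constructors and methods visible in this commit:

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.HapiMigrationStorageSvc;
import ca.uhn.fhir.jpa.migrate.MigrationResult;
import ca.uhn.fhir.jpa.migrate.MigrationTaskList;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;

import javax.sql.DataSource;
import java.util.Properties;

public class SchemaMigratorWiringSketch {
    public static MigrationResult run(DataSource theDataSource, MigrationTaskList theTasks) {
        // The storage service wraps the DAO; SchemaMigrator now takes both instead of Flyway callbacks
        HapiMigrationDao dao = new HapiMigrationDao(
                theDataSource, DriverTypeEnum.H2_EMBEDDED, SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME);
        HapiMigrationStorageSvc storageSvc = new HapiMigrationStorageSvc(dao);

        SchemaMigrator migrator = new SchemaMigrator("HAPI FHIR", SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME,
                theDataSource, new Properties(), theTasks, storageSvc);
        migrator.setDriverType(DriverTypeEnum.H2_EMBEDDED);

        migrator.createMigrationTableIfRequired();   // replaces Flyway's table bootstrapping
        MigrationResult result = migrator.migrate(); // null when hibernate auto-update mode is active
        migrator.validate();                         // passes now that no unapplied migrations remain
        return result;
    }
}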

View File

@ -1,81 +0,0 @@
package ca.uhn.fhir.jpa.migrate;
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.flywaydb.core.api.MigrationInfoService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
/**
* This class is an alternative to {@link FlywayMigrator}. It doesn't use Flyway, but instead just
* executes all tasks.
*/
public class TaskOnlyMigrator extends BaseMigrator {
private static final Logger ourLog = LoggerFactory.getLogger(TaskOnlyMigrator.class);
private List<BaseTask> myTasks = new ArrayList<>();
@Override
public void migrate() {
DriverTypeEnum.ConnectionProperties connectionProperties = getDriverType().newConnectionProperties(getDataSource());
for (BaseTask next : myTasks) {
next.setDriverType(getDriverType());
next.setDryRun(isDryRun());
next.setNoColumnShrink(isNoColumnShrink());
next.setConnectionProperties(connectionProperties);
try {
if (isDryRun()) {
ourLog.info("Dry run {} {}", next.getFlywayVersion(), next.getDescription());
} else {
ourLog.info("Executing {} {}", next.getFlywayVersion(), next.getDescription());
}
next.execute();
addExecutedStatements(next.getExecutedStatements());
} catch (SQLException e) {
throw new InternalErrorException(Msg.code(48) + e);
}
}
if (isDryRun()) {
StringBuilder statementBuilder = buildExecutedStatementsString();
ourLog.info("SQL that would be executed:\n\n***********************************\n{}***********************************", statementBuilder);
}
}
@Override
public Optional<MigrationInfoService> getMigrationInfo() {
return Optional.empty();
}
@Override
public void addTasks(List<BaseTask> theMigrationTasks) {
myTasks.addAll(theMigrationTasks);
}
}

View File

@ -0,0 +1,115 @@
package ca.uhn.fhir.jpa.migrate.dao;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.VersionEnum;
import org.apache.commons.lang3.Validate;
import org.flywaydb.core.api.MigrationVersion;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
public class HapiMigrationDao {
private static final Logger ourLog = LoggerFactory.getLogger(HapiMigrationDao.class);
private final JdbcTemplate myJdbcTemplate;
private final String myMigrationTablename;
private final MigrationQueryBuilder myMigrationQueryBuilder;
private final DataSource myDataSource;
public HapiMigrationDao(DataSource theDataSource, DriverTypeEnum theDriverType, String theMigrationTablename) {
myDataSource = theDataSource;
myJdbcTemplate = new JdbcTemplate(theDataSource);
myMigrationTablename = theMigrationTablename;
myMigrationQueryBuilder = new MigrationQueryBuilder(theDriverType, theMigrationTablename);
}
public Set<MigrationVersion> fetchSuccessfulMigrationVersions() {
List<HapiMigrationEntity> allEntries = findAll();
return allEntries.stream()
.filter(HapiMigrationEntity::getSuccess)
.map(HapiMigrationEntity::getVersion)
.map(MigrationVersion::fromVersion)
.collect(Collectors.toSet());
}
public void deleteAll() {
myJdbcTemplate.execute(myMigrationQueryBuilder.deleteAll());
}
public HapiMigrationEntity save(HapiMigrationEntity theEntity) {
Validate.notNull(theEntity.getDescription(), "Description may not be null");
Validate.notNull(theEntity.getExecutionTime(), "Execution time may not be null");
Validate.notNull(theEntity.getSuccess(), "Success may not be null");
Integer highestKey = getHighestKey();
if (highestKey == null || highestKey < 0) {
highestKey = 0;
}
Integer nextAvailableKey = highestKey + 1;
theEntity.setPid(nextAvailableKey);
theEntity.setType("JDBC");
theEntity.setScript("HAPI FHIR");
theEntity.setInstalledBy(VersionEnum.latestVersion().name());
theEntity.setInstalledOn(new Date());
String insertRecordStatement = myMigrationQueryBuilder.insertPreparedStatement();
int result = myJdbcTemplate.update(insertRecordStatement, theEntity.asPreparedStatementSetter());
return theEntity;
}
private Integer getHighestKey() {
String highestKeyQuery = myMigrationQueryBuilder.getHighestKeyQuery();
return myJdbcTemplate.queryForObject(highestKeyQuery, Integer.class);
}
public void createMigrationTableIfRequired() {
if (migrationTableExists()) {
return;
}
ourLog.info("Creating table {}", myMigrationTablename);
String createTableStatement = myMigrationQueryBuilder.createTableStatement();
ourLog.info(createTableStatement);
myJdbcTemplate.execute(createTableStatement);
String createIndexStatement = myMigrationQueryBuilder.createIndexStatement();
ourLog.info(createIndexStatement);
myJdbcTemplate.execute(createIndexStatement);
}
private boolean migrationTableExists() {
try {
try (Connection connection = myDataSource.getConnection()) {
ResultSet tables = connection.getMetaData().getTables(connection.getCatalog(), connection.getSchema(), null, null);
while (tables.next()) {
String tableName = tables.getString("TABLE_NAME");
if (myMigrationTablename.equalsIgnoreCase(tableName)) {
return true;
}
}
return false;
}
} catch (SQLException e) {
throw new InternalErrorException(Msg.code(2141) + e);
}
}
public List<HapiMigrationEntity> findAll() {
String allQuery = myMigrationQueryBuilder.findAllQuery();
ourLog.debug("Executing query: [{}]", allQuery);
return myJdbcTemplate.query(allQuery, HapiMigrationEntity.rowMapper());
}
}
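
A quick sketch of exercising the DAO directly (it mirrors BaseMigrationTest and HapiMigrationDaoTest later in this commit; the URL and table name are arbitrary). Note that save() assigns the pid itself as max(installed_rank) + 1:

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import org.apache.commons.dbcp2.BasicDataSource;

public class HapiMigrationDaoSketch {
    public static void main(String[] args) {
        BasicDataSource dataSource = new BasicDataSource();
        dataSource.setDriver(new org.h2.Driver());
        dataSource.setUrl("jdbc:h2:mem:dao_sketch");

        HapiMigrationDao dao = new HapiMigrationDao(dataSource, DriverTypeEnum.H2_EMBEDDED, "TEST_MIGRATION_TABLE");
        dao.createMigrationTableIfRequired(); // checks the JDBC catalogue first, so it is safe to call repeatedly

        HapiMigrationEntity entity = new HapiMigrationEntity();
        entity.setVersion("1.1");
        entity.setDescription("first migration");
        entity.setExecutionTime(1);
        entity.setSuccess(true);
        dao.save(entity); // pid becomes 1

        System.out.println(dao.fetchSuccessfulMigrationVersions()); // [1.1]
    }
}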

View File

@ -0,0 +1,165 @@
package ca.uhn.fhir.jpa.migrate.dao;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.jpa.migrate.taskdef.ColumnTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.ColumnTypeToDriverTypeToSqlType;
import com.healthmarketscience.common.util.AppendableExt;
import com.healthmarketscience.sqlbuilder.CreateIndexQuery;
import com.healthmarketscience.sqlbuilder.CreateTableQuery;
import com.healthmarketscience.sqlbuilder.DeleteQuery;
import com.healthmarketscience.sqlbuilder.FunctionCall;
import com.healthmarketscience.sqlbuilder.InsertQuery;
import com.healthmarketscience.sqlbuilder.SelectQuery;
import com.healthmarketscience.sqlbuilder.SqlObject;
import com.healthmarketscience.sqlbuilder.ValidationContext;
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn;
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbSchema;
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbSpec;
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbTable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.sql.Types;
public class MigrationQueryBuilder {
private static final Logger ourLog = LoggerFactory.getLogger(MigrationQueryBuilder.class);
private final DbSpec mySpec;
private final DbSchema mySchema;
private final DbTable myTable;
private final DbColumn myVersionCol;
private final DbColumn myInstalledRankCol;
private final DbColumn myDescriptionCol;
private final DbColumn myTypeCol;
private final DbColumn myScriptCol;
private final DbColumn myChecksumCol;
private final DbColumn myInstalledByCol;
private final DbColumn myInstalledOnCol;
private final DbColumn myExecutionTimeCol;
private final DbColumn mySuccessCol;
private final String myDeleteAll;
private final String myHighestKeyQuery;
private final DriverTypeEnum myDriverType;
private final String myMigrationTablename;
private final String myBooleanType;
public MigrationQueryBuilder(DriverTypeEnum theDriverType, String theMigrationTablename) {
myDriverType = theDriverType;
myMigrationTablename = theMigrationTablename;
mySpec = new DbSpec();
mySchema = mySpec.addDefaultSchema();
myTable = mySchema.addTable("\"" + theMigrationTablename + "\"");
myInstalledRankCol = myTable.addColumn("\"installed_rank\"", Types.INTEGER, null);
myInstalledRankCol.notNull();
myVersionCol = myTable.addColumn("\"version\"", Types.VARCHAR, HapiMigrationEntity.VERSION_MAX_SIZE);
myDescriptionCol = myTable.addColumn("\"description\"", Types.VARCHAR, HapiMigrationEntity.DESCRIPTION_MAX_SIZE);
myDescriptionCol.notNull();
myTypeCol = myTable.addColumn("\"type\"", Types.VARCHAR, HapiMigrationEntity.TYPE_MAX_SIZE);
myTypeCol.notNull();
myScriptCol = myTable.addColumn("\"script\"", Types.VARCHAR, HapiMigrationEntity.SCRIPT_MAX_SIZE);
myScriptCol.notNull();
myChecksumCol = myTable.addColumn("\"checksum\"", Types.INTEGER, null);
myInstalledByCol = myTable.addColumn("\"installed_by\"", Types.VARCHAR, HapiMigrationEntity.INSTALLED_BY_MAX_SIZE);
myInstalledByCol.notNull();
myInstalledOnCol = myTable.addColumn("\"installed_on\"", Types.DATE, null);
myInstalledOnCol.notNull();
myExecutionTimeCol = myTable.addColumn("\"execution_time\"", Types.INTEGER, null);
myExecutionTimeCol.notNull();
myBooleanType = ColumnTypeToDriverTypeToSqlType.getColumnTypeToDriverTypeToSqlType().get(ColumnTypeEnum.BOOLEAN).get(theDriverType);
mySuccessCol = myTable.addColumn("\"success\"", myBooleanType, null);
mySuccessCol.notNull();
myDeleteAll = new DeleteQuery(myTable).toString();
myHighestKeyQuery = buildHighestKeyQuery();
}
public String deleteAll() {
return myDeleteAll;
}
public String getHighestKeyQuery() {
return myHighestKeyQuery;
}
private String buildHighestKeyQuery() {
return new SelectQuery()
.addCustomColumns(FunctionCall.max().addColumnParams(myInstalledRankCol))
.validate()
.toString();
}
public String insertPreparedStatement() {
return new InsertQuery(myTable)
.addPreparedColumns(myInstalledRankCol,
myVersionCol,
myDescriptionCol,
myTypeCol,
myScriptCol,
myChecksumCol,
myInstalledByCol,
myInstalledOnCol,
myExecutionTimeCol,
mySuccessCol)
.validate()
.toString();
}
/**
sqlbuilder doesn't know about different database types, so we have to map boolean columns ourselves :-(
I'm gaining a new appreciation for Hibernate. I tried using Hibernate to maintain this table, but it added
a lot of overhead for managing just one table. Seeing this sort of nonsense, though, makes me wonder whether
we should just use Hibernate instead of sqlbuilder. It's disappointing that sqlbuilder doesn't handle booleans
well out of the box. (It does support a static option via System.setProperty("com.healthmarketscience.sqlbuilder.useBooleanLiterals", "true"),
but that only works at class-loading time, which isn't much help to us.)
*/
private SqlObject getBooleanValue(Boolean theBoolean) {
SqlObject retval = new SqlObject() {
@Override
protected void collectSchemaObjects(ValidationContext vContext) {}
@Override
public void appendTo(AppendableExt app) throws IOException {
String stringValue = ColumnTypeToDriverTypeToSqlType.toBooleanValue(myDriverType, theBoolean);
app.append(stringValue);
}
};
return retval;
}
public String createTableStatement() {
return new CreateTableQuery(myTable, true)
.validate()
.toString();
}
public String createIndexStatement() {
return new CreateIndexQuery(myTable, myMigrationTablename.toUpperCase() + "_PK_INDEX")
.setIndexType(CreateIndexQuery.IndexType.UNIQUE)
.addColumns(myInstalledRankCol)
.validate()
.toString();
}
public String findAllQuery() {
return new SelectQuery()
.addFromTable(myTable)
.addAllColumns()
.validate()
.toString();
}
}
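
Since the SQL is all pre-built strings, it is easy to eyeball what the builder emits. A sketch (class and table names arbitrary; the exact DDL varies by driver, e.g. the success column type comes from ColumnTypeToDriverTypeToSqlType):

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.dao.MigrationQueryBuilder;

public class MigrationQueryBuilderSketch {
    public static void main(String[] args) {
        MigrationQueryBuilder builder = new MigrationQueryBuilder(DriverTypeEnum.H2_EMBEDDED, "TEST_MIGRATION_TABLE");

        // Roughly: CREATE TABLE "TEST_MIGRATION_TABLE" ("installed_rank" INTEGER NOT NULL, "version" VARCHAR(50), ...)
        System.out.println(builder.createTableStatement());

        // Roughly: INSERT INTO "TEST_MIGRATION_TABLE" ("installed_rank","version",...) VALUES (?,?,?,?,?,?,?,?,?,?)
        System.out.println(builder.insertPreparedStatement());

        // Roughly: CREATE UNIQUE INDEX TEST_MIGRATION_TABLE_PK_INDEX ON "TEST_MIGRATION_TABLE" ("installed_rank")
        System.out.println(builder.createIndexStatement());
    }
}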

View File

@ -0,0 +1,181 @@
package ca.uhn.fhir.jpa.migrate.entity;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.springframework.jdbc.core.PreparedStatementSetter;
import org.springframework.jdbc.core.RowMapper;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.SequenceGenerator;
import java.util.Date;
// Note: even though we are using javax.persistence annotations here, we manage these records outside of JPA,
// so these annotations are for informational purposes only.
@Entity
public class HapiMigrationEntity {
public static final int VERSION_MAX_SIZE = 50;
public static final int DESCRIPTION_MAX_SIZE = 200;
public static final int TYPE_MAX_SIZE = 20;
public static final int SCRIPT_MAX_SIZE = 1000;
public static final int INSTALLED_BY_MAX_SIZE = 100;
@Id
@SequenceGenerator(name = "SEQ_FLY_HFJ_MIGRATION", sequenceName = "SEQ_FLY_HFJ_MIGRATION")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_FLY_HFJ_MIGRATION")
@Column(name = "INSTALLED_RANK")
private Integer myPid;
@Column(name = "VERSION", length = VERSION_MAX_SIZE)
private String myVersion;
@Column(name = "DESCRIPTION", length = DESCRIPTION_MAX_SIZE)
private String myDescription;
@Column(name = "TYPE", length = TYPE_MAX_SIZE)
private String myType;
@Column(name = "SCRIPT", length = SCRIPT_MAX_SIZE)
private String myScript;
@Column(name = "CHECKSUM")
private Integer myChecksum;
@Column(name = "INSTALLED_BY", length = INSTALLED_BY_MAX_SIZE)
private String myInstalledBy;
@Column(name = "INSTALLED_ON")
private Date myInstalledOn;
@Column(name = "EXECUTION_TIME")
private Integer myExecutionTime;
@Column(name = "SUCCESS")
private Boolean mySuccess;
public Integer getPid() {
return myPid;
}
public void setPid(Integer thePid) {
myPid = thePid;
}
public String getVersion() {
return myVersion;
}
public void setVersion(String theVersion) {
myVersion = theVersion;
}
public String getDescription() {
return myDescription;
}
public void setDescription(String theDescription) {
myDescription = theDescription;
}
public String getType() {
return myType;
}
public void setType(String theType) {
myType = theType;
}
public String getScript() {
return myScript;
}
public void setScript(String theScript) {
myScript = theScript;
}
public Integer getChecksum() {
return myChecksum;
}
public void setChecksum(Integer theChecksum) {
myChecksum = theChecksum;
}
public String getInstalledBy() {
return myInstalledBy;
}
public void setInstalledBy(String theInstalledBy) {
myInstalledBy = theInstalledBy;
}
public Date getInstalledOn() {
return myInstalledOn;
}
public void setInstalledOn(Date theInstalledOn) {
myInstalledOn = theInstalledOn;
}
public Integer getExecutionTime() {
return myExecutionTime;
}
public void setExecutionTime(Integer theExecutionTime) {
myExecutionTime = theExecutionTime;
}
public Boolean getSuccess() {
return mySuccess;
}
public void setSuccess(Boolean theSuccess) {
mySuccess = theSuccess;
}
public static HapiMigrationEntity fromBaseTask(BaseTask theTask) {
HapiMigrationEntity retval = new HapiMigrationEntity();
retval.setVersion(theTask.getMigrationVersion());
retval.setDescription(theTask.getDescription());
retval.setChecksum(theTask.hashCode());
retval.setType("JDBC");
return retval;
}
public static RowMapper<HapiMigrationEntity> rowMapper() {
return (rs, rowNum) -> {
HapiMigrationEntity entity = new HapiMigrationEntity();
entity.setPid(rs.getInt(1));
entity.setVersion(rs.getString(2));
entity.setDescription(rs.getString(3));
entity.setType(rs.getString(4));
entity.setScript(rs.getString(5));
entity.setChecksum(rs.getInt(6));
entity.setInstalledBy(rs.getString(7));
entity.setInstalledOn(rs.getDate(8));
entity.setExecutionTime(rs.getInt(9));
entity.setSuccess(rs.getBoolean(10));
return entity;
};
}
public PreparedStatementSetter asPreparedStatementSetter() {
return ps -> {
ps.setInt(1, getPid());
ps.setString(2, getVersion());
ps.setString(3, getDescription());
ps.setString(4, getType());
ps.setString(5, getScript());
if (getChecksum() == null) {
ps.setNull(6, java.sql.Types.INTEGER);
} else {
ps.setInt(6, getChecksum());
}
ps.setString(7, getInstalledBy());
ps.setDate(8, getInstalledOn() != null ? new java.sql.Date(getInstalledOn().getTime()) : null);
ps.setInt(9, getExecutionTime());
ps.setBoolean(10, getSuccess());
};
}
}
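
The prepared-statement setter and the row mapper above must agree on the ten-column order declared in MigrationQueryBuilder (installed_rank, version, description, type, script, checksum, installed_by, installed_on, execution_time, success). A small sketch of the task-to-entity conversion, assuming DropTableTask as defined elsewhere in this module:

import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.jpa.migrate.taskdef.DropTableTask;

public class HapiMigrationEntitySketch {
    public static void main(String[] args) {
        DropTableTask task = new DropTableTask("V5_5_0", "20210722.1");
        task.setTableName("FOO_TABLE");

        HapiMigrationEntity entity = HapiMigrationEntity.fromBaseTask(task);
        System.out.println(entity.getVersion());  // 5.5.0.20210722.1 (normalized, Flyway-style)
        System.out.println(entity.getType());     // JDBC
        System.out.println(entity.getChecksum()); // task.hashCode()
    }
}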

View File

@ -28,7 +28,6 @@ import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
import java.sql.SQLException;
import java.util.ArrayList;
@ -73,16 +72,7 @@ public class AddTableRawSqlTask extends BaseTableTask {
sqlStatements.addAll(myDriverNeutralSqls);
logInfo(ourLog, "Going to create table {} using {} SQL statements", getTableName(), sqlStatements.size());
getConnectionProperties().getTxTemplate().execute(t -> {
JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
for (String nextSql : sqlStatements) {
jdbcTemplate.execute(nextSql);
}
return null;
});
executeSqlListInTransaction(getTableName(), sqlStatements);
}
public void addSql(String theSql) {

View File

@ -115,9 +115,4 @@ public abstract class BaseTableColumnTypeTask extends BaseTableColumnTask {
}
return myColumnType.name();
}
public ColumnTypeToDriverTypeToSqlType getColumnTypeToDriverTypeToSqlType() {
return myColumnTypeToDriverTypeToSqlType;
}
}

View File

@ -27,7 +27,6 @@ import org.apache.commons.lang3.builder.HashCodeBuilder;
import java.util.Objects;
public abstract class BaseTableTask extends BaseTask {
protected final ColumnTypeToDriverTypeToSqlType myColumnTypeToDriverTypeToSqlType = new ColumnTypeToDriverTypeToSqlType();
private String myTableName;
@ -57,7 +56,7 @@ public abstract class BaseTableTask extends BaseTask {
}
protected String getSqlType(ColumnTypeEnum theColumnType, Long theColumnLength) {
String retVal = myColumnTypeToDriverTypeToSqlType.getColumnTypeToDriverTypeToSqlType().get(theColumnType).get(getDriverType());
String retVal = ColumnTypeToDriverTypeToSqlType.getColumnTypeToDriverTypeToSqlType().get(theColumnType).get(getDriverType());
Objects.requireNonNull(retVal);
if (theColumnType == ColumnTypeEnum.STRING) {

View File

@ -22,9 +22,11 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.HapiMigrationException;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.flywaydb.core.api.MigrationVersion;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -52,7 +54,7 @@ public abstract class BaseTask {
private DriverTypeEnum.ConnectionProperties myConnectionProperties;
private DriverTypeEnum myDriverType;
private String myDescription;
private int myChangesCount;
private Integer myChangesCount = 0;
private boolean myDryRun;
/**
@ -147,11 +149,9 @@ public abstract class BaseTask {
if (!isDryRun()) {
Integer changes;
if (myTransactional) {
changes = getConnectionProperties().getTxTemplate().execute(t -> {
return doExecuteSql(theSql, theArguments);
});
changes = getConnectionProperties().getTxTemplate().execute(t -> doExecuteSql(theSql, theArguments));
} else {
changes = doExecuteSql(theSql, theArguments);
changes = doExecuteSql(theSql, theArguments);
}
myChangesCount += changes;
@ -160,7 +160,28 @@ public abstract class BaseTask {
captureExecutedStatement(theTableName, theSql, theArguments);
}
private int doExecuteSql(@Language("SQL") String theSql, Object[] theArguments) {
protected void executeSqlListInTransaction(String theTableName, List<String> theSqlStatements) {
if (!isDryRun()) {
Integer changes;
changes = getConnectionProperties().getTxTemplate().execute(t -> doExecuteSqlList(theSqlStatements));
myChangesCount += changes;
}
for (@Language("SQL") String sqlStatement : theSqlStatements) {
captureExecutedStatement(theTableName, sqlStatement);
}
}
private Integer doExecuteSqlList(List<String> theSqlStatements) {
int changesCount = 0;
for (String nextSql : theSqlStatements) {
changesCount += doExecuteSql(nextSql);
}
return changesCount;
}
private int doExecuteSql(@Language("SQL") String theSql, Object... theArguments) {
JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
// 0 means no timeout -- we use this for index rebuilds that may take time.
jdbcTemplate.setQueryTimeout(0);
@ -172,18 +193,16 @@ public abstract class BaseTask {
return changesCount;
} catch (DataAccessException e) {
if (myFailureAllowed) {
ourLog.info("Task {} did not exit successfully, but task is allowed to fail", getFlywayVersion());
ourLog.info("Task {} did not exit successfully, but task is allowed to fail", getMigrationVersion());
ourLog.debug("Error was: {}", e.getMessage(), e);
return 0;
} else {
throw new DataAccessException(Msg.code(61) + "Failed during task " + getFlywayVersion() + ": " + e, e) {
private static final long serialVersionUID = 8211678931579252166L;
};
throw new HapiMigrationException(Msg.code(61) + "Failed during task " + getMigrationVersion() + ": " + e, e);
}
}
}
protected void captureExecutedStatement(String theTableName, @Language("SQL") String theSql, Object[] theArguments) {
protected void captureExecutedStatement(String theTableName, @Language("SQL") String theSql, Object... theArguments) {
myExecutedStatements.add(new ExecutedStatement(theTableName, theSql, theArguments));
}
@ -226,12 +245,6 @@ public abstract class BaseTask {
return;
}
}
if (!myOnlyAppliesToPlatforms.isEmpty()) {
if (!myOnlyAppliesToPlatforms.contains(getDriverType())) {
ourLog.debug("Skipping task {} as it does not apply to {}", getDescription(), getDriverType());
return;
}
}
doExecute();
}
@ -245,16 +258,18 @@ public abstract class BaseTask {
myFailureAllowed = theFailureAllowed;
}
public String getFlywayVersion() {
public String getMigrationVersion() {
String releasePart = myProductVersion;
if (releasePart.startsWith("V")) {
releasePart = releasePart.substring(1);
}
return releasePart + "." + mySchemaVersion;
String version = releasePart + "." + mySchemaVersion;
MigrationVersion migrationVersion = MigrationVersion.fromVersion(version);
return migrationVersion.getVersion();
}
protected void logInfo(Logger theLog, String theFormattedMessage, Object... theArguments) {
theLog.info(getFlywayVersion() + ": " + theFormattedMessage, theArguments);
theLog.info(getMigrationVersion() + ": " + theFormattedMessage, theArguments);
}
public void validateVersion() {
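
Worth noting for reviewers: getMigrationVersion() now round-trips through Flyway's MigrationVersion, which normalizes underscores to dots. A tiny sketch of that normalization (class name hypothetical; only the org.flywaydb version API is used):

import org.flywaydb.core.api.MigrationVersion;

public class VersionNormalizationSketch {
    public static void main(String[] args) {
        MigrationVersion a = MigrationVersion.fromVersion("5_5_0.20210722.1");
        MigrationVersion b = MigrationVersion.fromVersion("5.5.0.20210722.1");

        System.out.println(a.getVersion()); // 5.5.0.20210722.1
        System.out.println(a.equals(b));    // true
        System.out.println(a.compareTo(MigrationVersion.fromVersion("5.5.0.20210722.2")) < 0); // true
    }
}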

View File

@ -26,10 +26,12 @@ import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import java.util.HashMap;
import java.util.Map;
public class ColumnTypeToDriverTypeToSqlType {
Map<ColumnTypeEnum, Map<DriverTypeEnum, String>> myColumnTypeToDriverTypeToSqlType = new HashMap<>();
public final class ColumnTypeToDriverTypeToSqlType {
public ColumnTypeToDriverTypeToSqlType() {
private ColumnTypeToDriverTypeToSqlType() {}
static Map<ColumnTypeEnum, Map<DriverTypeEnum, String>> myColumnTypeToDriverTypeToSqlType = new HashMap<>();
static {
setColumnType(ColumnTypeEnum.INT, DriverTypeEnum.H2_EMBEDDED, "integer");
setColumnType(ColumnTypeEnum.INT, DriverTypeEnum.DERBY_EMBEDDED, "integer");
setColumnType(ColumnTypeEnum.INT, DriverTypeEnum.MARIADB_10_1, "integer");
@ -111,15 +113,26 @@ public class ColumnTypeToDriverTypeToSqlType {
setColumnType(ColumnTypeEnum.CLOB, DriverTypeEnum.MSSQL_2012, "varchar(MAX)");
}
public Map<ColumnTypeEnum, Map<DriverTypeEnum, String>> getColumnTypeToDriverTypeToSqlType() {
public static Map<ColumnTypeEnum, Map<DriverTypeEnum, String>> getColumnTypeToDriverTypeToSqlType() {
return myColumnTypeToDriverTypeToSqlType;
}
private void setColumnType(ColumnTypeEnum theColumnType, DriverTypeEnum theDriverType, String theColumnTypeSql) {
private static void setColumnType(ColumnTypeEnum theColumnType, DriverTypeEnum theDriverType, String theColumnTypeSql) {
Map<DriverTypeEnum, String> columnSqlType = myColumnTypeToDriverTypeToSqlType.computeIfAbsent(theColumnType, k -> new HashMap<>());
if (columnSqlType.containsKey(theDriverType)) {
throw new IllegalStateException(Msg.code(65) + "Duplicate key: " + theDriverType);
}
columnSqlType.put(theDriverType, theColumnTypeSql);
}
public static String toBooleanValue(DriverTypeEnum theDriverType, Boolean theBoolean) {
switch (theDriverType) {
case H2_EMBEDDED:
case DERBY_EMBEDDED:
case POSTGRES_9_4:
return theBoolean.toString();
default:
return theBoolean ? "1" : "0";
}
}
}
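
A short sketch of the boolean-literal mapping above (the driver choices are just examples from the table in this file); this mapping is only needed where SQL is rendered with literals, since the insert path now uses a prepared statement:

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.ColumnTypeToDriverTypeToSqlType;

public class BooleanValueSketch {
    public static void main(String[] args) {
        // Postgres (and H2/Derby) accept true/false literals
        System.out.println(ColumnTypeToDriverTypeToSqlType.toBooleanValue(DriverTypeEnum.POSTGRES_9_4, true)); // true
        // Everything else gets 1/0
        System.out.println(ColumnTypeToDriverTypeToSqlType.toBooleanValue(DriverTypeEnum.MSSQL_2012, true));   // 1
    }
}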

View File

@ -33,7 +33,7 @@ import java.util.List;
import java.util.Set;
public class InitializeSchemaTask extends BaseTask {
public static final String DESCRIPTION_PREFIX = "Initialize schema for ";
private static final String DESCRIPTION_PREFIX = "Initialize schema for ";
private static final Logger ourLog = LoggerFactory.getLogger(InitializeSchemaTask.class);
private final ISchemaInitializationProvider mySchemaInitializationProvider;
private boolean myInitializedSchema;

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.migrate.tasks.api;
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.MigrationTaskList;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import com.google.common.collect.Multimap;
import com.google.common.collect.MultimapBuilder;
@ -29,21 +30,19 @@ import org.apache.commons.lang3.Validate;
import org.flywaydb.core.api.MigrationVersion;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
public class BaseMigrationTasks<T extends Enum> {
MigrationVersion lastVersion;
private Multimap<T, BaseTask> myTasks = MultimapBuilder.hashKeys().arrayListValues().build();
@SuppressWarnings("unchecked")
public List<BaseTask> getTasks(@Nonnull T theFrom, @Nonnull T theTo) {
public MigrationTaskList getTaskList(@Nonnull T theFrom, @Nonnull T theTo) {
Validate.notNull(theFrom);
Validate.notNull(theTo);
Validate.isTrue(theFrom.ordinal() < theTo.ordinal(), "From version must be lower than to version");
List<BaseTask> retVal = new ArrayList<>();
MigrationTaskList retVal = new MigrationTaskList();
for (Object nextVersion : EnumUtils.getEnumList(theFrom.getClass())) {
if (((T) nextVersion).ordinal() <= theFrom.ordinal()) {
continue;
@ -53,9 +52,7 @@ public class BaseMigrationTasks<T extends Enum> {
}
Collection<BaseTask> nextValues = myTasks.get((T) nextVersion);
if (nextValues != null) {
retVal.addAll(nextValues);
}
retVal.addAll(nextValues);
}
return retVal;
@ -74,8 +71,8 @@ public class BaseMigrationTasks<T extends Enum> {
return theRelease.name();
}
public List<BaseTask> getAllTasks(T[] theVersionEnumValues) {
List<BaseTask> retval = new ArrayList<>();
public MigrationTaskList getAllTasks(T[] theVersionEnumValues) {
MigrationTaskList retval = new MigrationTaskList();
for (T nextVersion : theVersionEnumValues) {
Collection<BaseTask> nextValues = myTasks.get(nextVersion);
if (nextValues != null) {
@ -87,9 +84,12 @@ public class BaseMigrationTasks<T extends Enum> {
return retval;
}
protected BaseTask getTaskWithVersion(String theFlywayVersion) {
protected BaseTask getTaskWithVersion(String theMigrationVersion) {
// First normalize the version number
String expectedVersion = MigrationVersion.fromVersion(theMigrationVersion).getVersion();
return myTasks.values().stream()
.filter(task -> theFlywayVersion.equals(task.getFlywayVersion()))
.filter(task -> expectedVersion.equals(task.getMigrationVersion()))
.findFirst()
.get();
}
@ -97,7 +97,7 @@ public class BaseMigrationTasks<T extends Enum> {
void validate(Collection<BaseTask> theTasks) {
for (BaseTask task : theTasks) {
task.validateVersion();
String version = task.getFlywayVersion();
String version = task.getMigrationVersion();
MigrationVersion migrationVersion = MigrationVersion.fromVersion(version);
if (lastVersion != null) {
if (migrationVersion.compareTo(lastVersion) <= 0) {

View File

@ -1,31 +0,0 @@
import org.flywaydb.core.internal.database.DatabaseType;
import org.flywaydb.core.internal.database.DatabaseTypeRegister;
import org.junit.jupiter.api.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
public class FlywayDbCompatibilityTest {
@Test
public void testFlywayDbCompatibility() {
DatabaseType h2Type = DatabaseTypeRegister.getDatabaseTypeForUrl("jdbc:h2:mem:test");
assertThat(h2Type.getName(), is(equalTo("H2")));
DatabaseType sqlServerType = DatabaseTypeRegister.getDatabaseTypeForUrl("jdbc:sqlserver://localhost:1433;database=test");
assertThat(sqlServerType.getName(), is(equalTo("Azure Synapse")));
DatabaseType oracleType = DatabaseTypeRegister.getDatabaseTypeForUrl("jdbc:oracle:thin:@//host:port/service");
assertThat(oracleType.getName(), is(equalTo("Oracle")));
DatabaseType mySqlType = DatabaseTypeRegister.getDatabaseTypeForUrl("jdbc:mysql://localhost:3306/cdr?serverTimezone=Canada/Eastern");
assertThat(mySqlType.getName(), is(equalTo("MySQL")));
DatabaseType postgresType = DatabaseTypeRegister.getDatabaseTypeForUrl("jdbc:postgresql://localhost:5432/cdr");
assertThat(postgresType.getName(), is(equalTo("CockroachDB")));
DatabaseType mariaDbType = DatabaseTypeRegister.getDatabaseTypeForUrl("jdbc:mariadb://localhost:3306/cdr");
assertThat(mariaDbType.getName(), is(equalTo("MariaDB")));
}
}

View File

@ -0,0 +1,39 @@
package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import org.apache.commons.dbcp2.BasicDataSource;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import javax.sql.DataSource;
public abstract class BaseMigrationTest {
private static final String TABLE_NAME = "TEST_MIGRATION_TABLE";
protected static HapiMigrationDao ourHapiMigrationDao;
protected static HapiMigrationStorageSvc ourHapiMigrationStorageSvc;
@BeforeAll
public static void beforeAll() {
ourHapiMigrationDao = new HapiMigrationDao(getDataSource(), DriverTypeEnum.H2_EMBEDDED, TABLE_NAME);
ourHapiMigrationDao.createMigrationTableIfRequired();
ourHapiMigrationStorageSvc = new HapiMigrationStorageSvc(ourHapiMigrationDao);
}
private static DataSource getDataSource() {
BasicDataSource retVal = new BasicDataSource();
retVal.setDriver(new org.h2.Driver());
retVal.setUrl("jdbc:h2:mem:test_migration");
retVal.setMaxWaitMillis(30000);
retVal.setUsername("");
retVal.setPassword("");
retVal.setMaxTotal(5);
return retVal;
}
@AfterEach
void after() {
ourHapiMigrationDao.deleteAll();
}
}

View File

@ -1,120 +0,0 @@
package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.InitializeSchemaTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.ISchemaInitializationProvider;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.flywaydb.core.api.migration.Context;
import org.flywaydb.core.internal.database.DatabaseTypeRegister;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.sql.SQLException;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
class FlywayMigrationTaskTest {
TestTask myTestTask = new TestTask();
@Mock
private FlywayMigrator myFlywayMigrator;
@Mock
private Context myContext;
@Test
public void schemaInitializedStubsFollowingMigration() {
when(myFlywayMigrator.isSchemaWasInitialized()).thenReturn(true);
FlywayMigrationTask task = new FlywayMigrationTask(myTestTask, myFlywayMigrator);
task.migrate(myContext);
assertTrue(myTestTask.isDoNothing());
}
@Test
public void schemaNotInitializedStubsFollowingMigration() {
when(myFlywayMigrator.isSchemaWasInitialized()).thenReturn(false);
FlywayMigrationTask task = new FlywayMigrationTask(myTestTask, myFlywayMigrator);
task.migrate(myContext);
assertFalse(myTestTask.isDoNothing());
}
@Test
public void schemaInitializedStubsFollowingMigrationExceptInitSchemaTask() {
when(myFlywayMigrator.isSchemaWasInitialized()).thenReturn(true);
InitializeSchemaTask initSchemaTask = new TestInitializeSchemaTask(false);
FlywayMigrationTask task = new FlywayMigrationTask(initSchemaTask, myFlywayMigrator);
task.migrate(myContext);
assertFalse(myTestTask.isDoNothing());
}
@Test
public void schemaInitializedSetsInitializedFlag() {
InitializeSchemaTask initSchemaTask = new TestInitializeSchemaTask(true);
FlywayMigrationTask task = new FlywayMigrationTask(initSchemaTask, myFlywayMigrator);
task.migrate(myContext);
verify(myFlywayMigrator, times(1)).setSchemaWasInitialized(true);
}
@Test
public void nonInitSchemaInitializedSetsInitializedFlag() {
InitializeSchemaTask initSchemaTask = new TestInitializeSchemaTask(false);
FlywayMigrationTask task = new FlywayMigrationTask(initSchemaTask, myFlywayMigrator);
task.migrate(myContext);
verify(myFlywayMigrator, never()).setSchemaWasInitialized(true);
}
// Can't use @Mock since BaseTask.equals is final
private class TestTask extends BaseTask {
protected TestTask() {
super("1", "1");
}
@Override
public void validate() {
// do nothing
}
@Override
protected void doExecute() throws SQLException {
// do nothing
}
@Override
protected void generateHashCode(HashCodeBuilder theBuilder) {
// do nothing
}
@Override
protected void generateEquals(EqualsBuilder theBuilder, BaseTask theOtherObject) {
// do nothing
}
}
private class TestInitializeSchemaTask extends InitializeSchemaTask {
private final boolean myInitializedSchema;
public TestInitializeSchemaTask(boolean theInitializedSchema) {
super("1", "1", mock(ISchemaInitializationProvider.class));
myInitializedSchema = theInitializedSchema;
}
@Override
public void execute() throws SQLException {
// nothing
}
@Override
public boolean initializedSchema() {
return myInitializedSchema;
}
}
}

View File

@ -0,0 +1,108 @@
package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ColumnTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.DropTableTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
import org.flywaydb.core.api.MigrationVersion;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.jupiter.api.Assertions.assertEquals;
class HapiMigrationStorageSvcTest extends BaseMigrationTest {
private static final String RELEASE = "V5_5_0";
private static final String RELEASE_VERSION_PREFIX = "5.5.0.";
public static final String FAILED_VERSION = "20210722.3";
public static final String LAST_TASK_VERSION = RELEASE_VERSION_PREFIX + FAILED_VERSION;
public static final String LAST_SUCCEEDED_VERSION = "20210722.2";
@Test
void diff_oneNew_returnsNew() {
createTasks();
Set<MigrationVersion> appliedMigrations = ourHapiMigrationStorageSvc.fetchAppliedMigrationVersions();
assertThat(appliedMigrations, hasSize(6));
MigrationTaskList taskList = buildTasks();
String version = "20210722.4";
BaseTask dropTableTask = new DropTableTask(RELEASE, version);
taskList.add(dropTableTask);
MigrationTaskList notAppliedYet = ourHapiMigrationStorageSvc.diff(taskList);
assertEquals(2, notAppliedYet.size());
List<BaseTask> notAppliedTasks = new ArrayList<>();
notAppliedYet.forEach(notAppliedTasks::add);
assertEquals(RELEASE_VERSION_PREFIX + FAILED_VERSION, notAppliedTasks.get(0).getMigrationVersion());
assertEquals(RELEASE_VERSION_PREFIX + version, notAppliedTasks.get(1).getMigrationVersion());
}
@Test
void getLatestAppliedVersion_empty_unknown() {
String latest = ourHapiMigrationStorageSvc.getLatestAppliedVersion();
assertEquals(HapiMigrationStorageSvc.UNKNOWN_VERSION, latest);
}
@Test
void getLatestAppliedVersion_full_last() {
String latest = ourHapiMigrationStorageSvc.getLatestAppliedVersion();
assertEquals(HapiMigrationStorageSvc.UNKNOWN_VERSION, latest);
createTasks();
String newLatest = ourHapiMigrationStorageSvc.getLatestAppliedVersion();
assertEquals(RELEASE_VERSION_PREFIX + LAST_SUCCEEDED_VERSION, newLatest);
}
void createTasks() {
MigrationTaskList taskList = buildTasks();
assertEquals(7, taskList.size());
taskList.forEach(task -> {
HapiMigrationEntity entity = HapiMigrationEntity.fromBaseTask(task);
entity.setExecutionTime(1);
entity.setSuccess(!LAST_TASK_VERSION.equals(task.getMigrationVersion()));
ourHapiMigrationDao.save(entity);
});
}
MigrationTaskList buildTasks() {
MigrationTaskList taskList = new MigrationTaskList();
Builder version = forVersion(taskList);
Builder.BuilderAddTableByColumns cmpToks = version
.addTableByColumns("20210720.3", "HFJ_IDX_CMB_TOK_NU", "PID");
cmpToks.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG);
cmpToks.addColumn("RES_ID").nonNullable().type(ColumnTypeEnum.LONG);
cmpToks.addColumn("HASH_COMPLETE").nonNullable().type(ColumnTypeEnum.LONG);
cmpToks.addColumn("IDX_STRING").nonNullable().type(ColumnTypeEnum.STRING, 500);
cmpToks.addForeignKey("20210720.4", "FK_IDXCMBTOKNU_RES_ID").toColumn("RES_ID").references("HFJ_RESOURCE", "RES_ID");
cmpToks.addIndex("20210720.5", "IDX_IDXCMBTOKNU_STR").unique(false).withColumns("IDX_STRING");
cmpToks.addIndex("20210720.6", "IDX_IDXCMBTOKNU_RES").unique(false).withColumns("RES_ID");
Builder.BuilderWithTableName cmbTokNuTable = version.onTable("HFJ_IDX_CMB_TOK_NU");
cmbTokNuTable.addColumn("20210722.1", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT);
cmbTokNuTable.addColumn(LAST_SUCCEEDED_VERSION, "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY);
cmbTokNuTable.modifyColumn(FAILED_VERSION, "RES_ID").nullable().withType(ColumnTypeEnum.LONG);
return taskList;
}
public Builder forVersion(MigrationTaskList theTaskList) {
BaseMigrationTasks.IAcceptsTasks sink = theTask -> {
theTask.validate();
theTaskList.add(theTask);
};
return new Builder(RELEASE, sink);
}
}

View File

@ -16,7 +16,7 @@ import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
public class MigrationTaskSkipperTest {
public static final String RELEASE = "4_1_0";
public static final String RELEASE = "4.1.0";
public static final String DATE_PREFIX = "20191214.";
private static final String VERSION_PREFIX = RELEASE + "." + DATE_PREFIX;
private List<BaseTask> myTasks;
@ -104,7 +104,7 @@ public class MigrationTaskSkipperTest {
private void assertSkipped(List<BaseTask> theTasks, Integer... theVersions) {
Set<String> expectedVersions = integersToVersions(theVersions).collect(Collectors.toSet());
Set<String> taskVersions = theTasks.stream().filter(BaseTask::isDoNothing).map(BaseTask::getFlywayVersion).collect(Collectors.toSet());
Set<String> taskVersions = theTasks.stream().filter(BaseTask::isDoNothing).map(BaseTask::getMigrationVersion).collect(Collectors.toSet());
assertThat(taskVersions, equalTo(expectedVersions));
}

View File

@ -2,26 +2,27 @@ package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.jpa.migrate.taskdef.AddTableRawSqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTest;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.flywaydb.core.api.FlywayException;
import org.hamcrest.Matchers;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import javax.annotation.Nonnull;
import java.sql.SQLException;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.function.Supplier;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.startsWith;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
@ -54,39 +55,34 @@ public class SchemaMigratorTest extends BaseTest {
@MethodSource("data")
public void testRepairFailedMigration(Supplier<TestDatabaseDetails> theTestDatabaseDetails) {
before(theTestDatabaseDetails);
SchemaMigrator schemaMigrator = createSchemaMigrator("SOMETABLE", "create fable SOMETABLE (PID bigint not null, TEXTCOL varchar(255))", "1");
try {
schemaMigrator.migrate();
fail();
} catch (FlywayException e) {
} catch (HapiMigrationException e) {
assertEquals(org.springframework.jdbc.BadSqlGrammarException.class, e.getCause().getCause().getClass());
MigrationResult failedResult = e.getResult();
assertEquals(0, failedResult.changes);
assertEquals(0, failedResult.succeededTasks.size());
assertEquals(1, failedResult.failedTasks.size());
assertEquals(0, failedResult.executedStatements.size());
assertThat(myHapiMigrationDao.findAll(), hasSize(1));
}
schemaMigrator = createTableMigrator();
schemaMigrator.migrate();
}
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("data")
public void testOutOfOrderMigration(Supplier<TestDatabaseDetails> theTestDatabaseDetails) {
before(theTestDatabaseDetails);
MigrationResult result = schemaMigrator.migrate();
assertEquals(0, result.changes);
assertEquals(1, result.succeededTasks.size());
assertEquals(0, result.failedTasks.size());
assertEquals(1, result.executedStatements.size());
SchemaMigrator schemaMigrator = createSchemaMigrator("SOMETABLE", "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))", "2");
schemaMigrator.migrate();
AddTableRawSqlTask task1 = createAddTableTask("SOMEOTHERTABLE", "create table SOMEOTHERTABLE (PID bigint not null, TEXTCOL varchar(255))", "1");
AddTableRawSqlTask task2 = createAddTableTask("SOMETABLE", "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))", "2");
schemaMigrator = createSchemaMigrator(task1, task2);
schemaMigrator.setStrictOrder(true);
try {
schemaMigrator.migrate();
fail();
} catch (FlywayException e) {
assertThat(e.getMessage(), containsString("Detected resolved migration not applied to database: 1.1"));
}
schemaMigrator.setStrictOrder(false);
schemaMigrator.migrate();
List<HapiMigrationEntity> entities = myHapiMigrationDao.findAll();
assertThat(entities, hasSize(2));
assertEquals(1, entities.get(0).getPid());
assertEquals(false, entities.get(0).getSuccess());
assertEquals(2, entities.get(1).getPid());
assertEquals(true, entities.get(1).getSuccess());
}
@ParameterizedTest(name = "{index}: {0}")
@ -110,9 +106,9 @@ public class SchemaMigratorTest extends BaseTest {
taskD.setTableName("SOMETABLE_D");
taskD.addSql(getDriverType(), "create table SOMETABLE_D (PID bigint not null, TEXTCOL varchar(255))");
ImmutableList<BaseTask> taskList = ImmutableList.of(taskA, taskB, taskC, taskD);
MigrationTaskSkipper.setDoNothingOnSkippedTasks(taskList, "4_1_0.20191214.2, 4_1_0.20191214.4");
SchemaMigrator schemaMigrator = new SchemaMigrator(getUrl(), SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), taskList);
MigrationTaskList taskList = new MigrationTaskList(ImmutableList.of(taskA, taskB, taskC, taskD));
taskList.setDoNothingOnSkippedTasks("4.1.0.20191214.2, 4.1.0.20191214.4");
SchemaMigrator schemaMigrator = new SchemaMigrator(getUrl(), SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), taskList, myHapiMigrationStorageSvc);
schemaMigrator.setDriverType(getDriverType());
schemaMigrator.migrate();
@ -122,28 +118,6 @@ public class SchemaMigratorTest extends BaseTest {
assertThat(tableNames, Matchers.containsInAnyOrder("SOMETABLE_A", "SOMETABLE_C"));
}
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("data")
public void testMigrationRequiredNoFlyway(Supplier<TestDatabaseDetails> theTestDatabaseDetails) throws SQLException {
before(theTestDatabaseDetails);
SchemaMigrator schemaMigrator = createTableMigrator();
schemaMigrator.setDriverType(getDriverType());
schemaMigrator.setDontUseFlyway(true);
// Validate shouldn't fail if we aren't using Flyway
schemaMigrator.validate();
schemaMigrator.migrate();
schemaMigrator.validate();
DriverTypeEnum.ConnectionProperties connectionProperties = getDriverType().newConnectionProperties(getDataSource().getUrl(), getDataSource().getUsername(), getDataSource().getPassword());
Set<String> tableNames = JdbcUtils.getTableNames(connectionProperties);
assertThat(tableNames, Matchers.contains("SOMETABLE"));
}
@Nonnull
private SchemaMigrator createTableMigrator() {
return createSchemaMigrator("SOMETABLE", "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))", "1");
@ -157,7 +131,8 @@ public class SchemaMigratorTest extends BaseTest {
@Nonnull
private SchemaMigrator createSchemaMigrator(BaseTask... tasks) {
SchemaMigrator retVal = new SchemaMigrator(getUrl(), SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), Lists.newArrayList(tasks));
MigrationTaskList taskList = new MigrationTaskList(Lists.newArrayList(tasks));
SchemaMigrator retVal = new SchemaMigrator(getUrl(), SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), taskList, myHapiMigrationStorageSvc);
retVal.setDriverType(getDriverType());
return retVal;
}

View File

@ -0,0 +1,50 @@
package ca.uhn.fhir.jpa.migrate.dao;
import ca.uhn.fhir.jpa.migrate.BaseMigrationTest;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import org.flywaydb.core.api.MigrationVersion;
import org.junit.jupiter.api.Test;
import java.util.Set;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.jupiter.api.Assertions.assertEquals;
class HapiMigrationDaoTest extends BaseMigrationTest {
@Test
public void findAll_empty_returnsNothing() {
Set<MigrationVersion> result = ourHapiMigrationDao.fetchSuccessfulMigrationVersions();
assertThat(result, hasSize(0));
}
@Test
public void findAll_2records_returnsBoth() {
HapiMigrationEntity record1 = buildEntity("DESC1", "1.1");
HapiMigrationEntity result1 = ourHapiMigrationDao.save(record1);
assertEquals(1, result1.getPid());
{
Set<MigrationVersion> all = ourHapiMigrationDao.fetchSuccessfulMigrationVersions();
assertThat(all, hasSize(1));
}
HapiMigrationEntity record2 = buildEntity("DESC2", "1.2");
HapiMigrationEntity result2 = ourHapiMigrationDao.save(record2);
assertEquals(2, result2.getPid());
{
Set<MigrationVersion> all = ourHapiMigrationDao.fetchSuccessfulMigrationVersions();
assertThat(all, hasSize(2));
}
}
private HapiMigrationEntity buildEntity(String theDesc, String theVersion) {
HapiMigrationEntity retval = new HapiMigrationEntity();
retval.setVersion(theVersion);
retval.setDescription(theDesc);
retval.setExecutionTime(1);
retval.setSuccess(true);
return retval;
}
}

View File

@ -1,9 +1,9 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.HapiMigrationException;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;
import org.flywaydb.core.internal.command.DbMigrate;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
@ -12,6 +12,7 @@ import java.util.function.Supplier;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
@ -87,12 +88,12 @@ public class AddColumnTest extends BaseTest {
.nullable()
.type(ColumnTypeEnum.INT);
getMigrator().addTasks(tasks.getTasks(VersionEnum.V0_1, VersionEnum.V4_0_0));
getMigrator().addTasks(tasks.getTaskList(VersionEnum.V0_1, VersionEnum.V4_0_0));
try {
getMigrator().migrate();
fail();
} catch (DbMigrate.FlywayMigrateException e) {
assertEquals("Migration failed !", e.getMessage());
} catch (HapiMigrationException e) {
assertThat(e.getMessage(), startsWith("HAPI-0047: Failure executing task \"Add column FOO_COLUMN on table FOO_TABLE\", aborting! Cause: ca.uhn.fhir.jpa.migrate.HapiMigrationException: HAPI-0061: Failed during task 4.0.0.2001.01: "));
}
}
@ -111,7 +112,7 @@ public class AddColumnTest extends BaseTest {
.type(ColumnTypeEnum.INT)
.failureAllowed();
getMigrator().addTasks(tasks.getTasks(VersionEnum.V0_1, VersionEnum.V4_0_0));
getMigrator().addTasks(tasks.getTaskList(VersionEnum.V0_1, VersionEnum.V4_0_0));
getMigrator().migrate();
}

View File

@ -25,14 +25,14 @@ public class AddIdGeneratorTaskTest extends BaseTest {
assertThat(JdbcUtils.getSequenceNames(getConnectionProperties()), empty());
MyMigrationTasks migrationTasks = new MyMigrationTasks("123456.7");
getMigrator().addTasks(migrationTasks.getTasks(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
getMigrator().addTasks(migrationTasks.getTaskList(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
getMigrator().migrate();
assertThat(JdbcUtils.getSequenceNames(getConnectionProperties()), containsInAnyOrder("SEQ_FOO"));
// Second time, should produce no action
migrationTasks = new MyMigrationTasks("123456.8");
getMigrator().addTasks(migrationTasks.getTasks(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
getMigrator().addTasks(migrationTasks.getTaskList(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
getMigrator().migrate();
assertThat(JdbcUtils.getSequenceNames(getConnectionProperties()), containsInAnyOrder("SEQ_FOO"));
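
Registering the same generator under a fresh version and migrating again leaves SEQ_FOO untouched, because the task checks the live schema before acting. A hedged sketch of verifying that no-op via the MigrationResult fields shown later in this diff; that changes stays at zero when every task short-circuits is an assumption here, not something this hunk asserts:

MigrationResult secondRun = getMigrator().migrate();
assertEquals(0, secondRun.changes); // schema already up to date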

View File

@ -24,7 +24,7 @@ public class AddTableByColumnTaskTest extends BaseTest {
before(theTestDatabaseDetails);
MyMigrationTasks migrator = new MyMigrationTasks();
getMigrator().addTasks(migrator.getTasks(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
getMigrator().addTasks(migrator.getTaskList(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
getMigrator().migrate();
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), containsInAnyOrder("FOO_TABLE", "TGT_TABLE"));

View File

@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.MigrationTaskList;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;
import org.junit.jupiter.params.ParameterizedTest;
@ -97,7 +98,7 @@ public class ArbitrarySqlTaskTest extends BaseTest {
.addTableRawSql("1", "A")
.addSql("delete from TEST_UPDATE_TASK where RES_TYPE = 'Patient'");
getMigrator().addTasks(migrator.getTasks(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
getMigrator().addTasks(migrator.getTaskList(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
getMigrator().migrate();
rows = executeQuery("select * from TEST_UPDATE_TASK");
@ -125,9 +126,9 @@ public class ArbitrarySqlTaskTest extends BaseTest {
.executeRawSql("1", getDriverType(), "delete from TEST_UPDATE_TASK where RES_TYPE = 'Patient'")
.executeRawSql("2", getDriverType(), "delete from TEST_UPDATE_TASK where RES_TYPE = 'Encounter'");
List<BaseTask> tasks = migrator.getTasks(VersionEnum.V3_3_0, VersionEnum.V3_6_0);
ourLog.info("Have tasks: {}", tasks);
getMigrator().addTasks(tasks);
MigrationTaskList taskList = migrator.getTaskList(VersionEnum.V3_3_0, VersionEnum.V3_6_0);
ourLog.info("Have tasks: {}", taskList);
getMigrator().addTasks(taskList);
getMigrator().migrate();
rows = executeQuery("select * from TEST_UPDATE_TASK");
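
MigrationTaskList is this commit's replacement for the raw List<BaseTask> that used to be passed around, so behaviour can hang off it as a first-order type. Its real methods are not visible in this hunk; purely as an illustration of the shape, a minimal iterable wrapper might look like the following (hypothetical sketch only, not the actual MigrationTaskList API):

import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;

public class TaskListSketch implements Iterable<BaseTask> {
	private final List<BaseTask> myTasks = new ArrayList<>();

	public void append(Collection<BaseTask> theTasks) {
		myTasks.addAll(theTasks);
	}

	public int size() {
		return myTasks.size();
	}

	@Override
	public Iterator<BaseTask> iterator() {
		return myTasks.iterator();
	}
}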

View File

@ -1,9 +1,11 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.FlywayMigrator;
import ca.uhn.fhir.jpa.migrate.HapiMigrationStorageSvc;
import ca.uhn.fhir.jpa.migrate.HapiMigrator;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import org.apache.commons.dbcp2.BasicDataSource;
import org.intellij.lang.annotations.Language;
import org.junit.jupiter.api.AfterEach;
@ -27,11 +29,12 @@ public abstract class BaseTest {
private static int ourDatabaseUrl = 0;
private BasicDataSource myDataSource;
private String myUrl;
private FlywayMigrator myMigrator;
private HapiMigrator myMigrator;
private DriverTypeEnum.ConnectionProperties myConnectionProperties;
protected HapiMigrationDao myHapiMigrationDao;
protected HapiMigrationStorageSvc myHapiMigrationStorageSvc;
public static Stream<Supplier<TestDatabaseDetails>> data() {
ourLog.info("H2: {}", org.h2.Driver.class.toString());
ArrayList<Supplier<TestDatabaseDetails>> retVal = new ArrayList<>();
@ -39,6 +42,7 @@ public abstract class BaseTest {
retVal.add(new Supplier<TestDatabaseDetails>() {
@Override
public TestDatabaseDetails get() {
ourLog.info("H2: {}", org.h2.Driver.class.toString());
String url = "jdbc:h2:mem:" + DATABASE_NAME + ourDatabaseUrl++;
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "SA", "SA");
BasicDataSource dataSource = new BasicDataSource();
@ -46,7 +50,7 @@ public abstract class BaseTest {
dataSource.setUsername("SA");
dataSource.setPassword("SA");
dataSource.setDriverClassName(DriverTypeEnum.H2_EMBEDDED.getDriverClassName());
FlywayMigrator migrator = new FlywayMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.H2_EMBEDDED);
HapiMigrator migrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.H2_EMBEDDED);
return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
}
@ -60,6 +64,8 @@ public abstract class BaseTest {
retVal.add(new Supplier<TestDatabaseDetails>() {
@Override
public TestDatabaseDetails get() {
ourLog.info("Derby: {}", DriverTypeEnum.DERBY_EMBEDDED.getDriverClassName());
String url = "jdbc:derby:memory:" + DATABASE_NAME + ourDatabaseUrl++ + ";create=true";
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(url, "SA", "SA");
BasicDataSource dataSource = new BasicDataSource();
@ -67,7 +73,7 @@ public abstract class BaseTest {
dataSource.setUsername("SA");
dataSource.setPassword("SA");
dataSource.setDriverClassName(DriverTypeEnum.DERBY_EMBEDDED.getDriverClassName());
FlywayMigrator migrator = new FlywayMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.DERBY_EMBEDDED);
HapiMigrator migrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.DERBY_EMBEDDED);
return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
}
@ -86,6 +92,10 @@ public abstract class BaseTest {
myConnectionProperties = testDatabaseDetails.myConnectionProperties;
myDataSource = testDatabaseDetails.myDataSource;
myMigrator = testDatabaseDetails.myMigrator;
myHapiMigrationDao = new HapiMigrationDao(testDatabaseDetails.myDataSource, testDatabaseDetails.getDriverType(), SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME);
myHapiMigrationDao.createMigrationTableIfRequired();
myHapiMigrationStorageSvc = new HapiMigrationStorageSvc(myHapiMigrationDao);
}
public String getUrl() {
@ -123,7 +133,7 @@ public abstract class BaseTest {
});
}
public FlywayMigrator getMigrator() {
public HapiMigrator getMigrator() {
return myMigrator;
}
@ -143,9 +153,9 @@ public abstract class BaseTest {
private final String myUrl;
private final DriverTypeEnum.ConnectionProperties myConnectionProperties;
private final BasicDataSource myDataSource;
private final FlywayMigrator myMigrator;
private final HapiMigrator myMigrator;
public TestDatabaseDetails(String theUrl, DriverTypeEnum.ConnectionProperties theConnectionProperties, BasicDataSource theDataSource, FlywayMigrator theMigrator) {
public TestDatabaseDetails(String theUrl, DriverTypeEnum.ConnectionProperties theConnectionProperties, BasicDataSource theDataSource, HapiMigrator theMigrator) {
myUrl = theUrl;
myConnectionProperties = theConnectionProperties;
myDataSource = theDataSource;
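
The fixture now provisions, per database, a HapiMigrator plus the DAO and storage service that replace Flyway's schema-history handling. Condensed from the H2 supplier above (URL and credentials are the test's own):

String url = "jdbc:h2:mem:" + DATABASE_NAME + ourDatabaseUrl++;
BasicDataSource dataSource = new BasicDataSource();
dataSource.setUrl(url);
dataSource.setUsername("SA");
dataSource.setPassword("SA");
dataSource.setDriverClassName(DriverTypeEnum.H2_EMBEDDED.getDriverClassName());
HapiMigrator migrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.H2_EMBEDDED);
HapiMigrationDao dao = new HapiMigrationDao(dataSource, DriverTypeEnum.H2_EMBEDDED, SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME);
dao.createMigrationTableIfRequired(); // idempotent: per the commit notes, an existing table is detected via the catalogue
HapiMigrationStorageSvc storageSvc = new HapiMigrationStorageSvc(dao);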

View File

@ -26,7 +26,7 @@ public class DropIdGeneratorTaskTest extends BaseTest {
assertThat(JdbcUtils.getSequenceNames(getConnectionProperties()), containsInAnyOrder("SEQ_FOO"));
MyMigrationTasks migrator = new MyMigrationTasks();
getMigrator().addTasks(migrator.getTasks(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
getMigrator().addTasks(migrator.getTaskList(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
getMigrator().migrate();
assertThat(JdbcUtils.getSequenceNames(getConnectionProperties()), empty());

View File

@ -1,6 +1,7 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.MigrationResult;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
@ -13,6 +14,7 @@ import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.core.IsNot.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class DropTableTest extends BaseTest {
@ -73,7 +75,7 @@ public class DropTableTest extends BaseTest {
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("data")
public void testFlywayGetMigrationInfo(Supplier<TestDatabaseDetails> theTestDatabaseDetails) throws SQLException {
public void testHapiMigrationResult(Supplier<TestDatabaseDetails> theTestDatabaseDetails) throws SQLException {
before(theTestDatabaseDetails);
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
@ -84,9 +86,11 @@ public class DropTableTest extends BaseTest {
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), (hasItems("SOMETABLE")));
assertThat(getMigrator().getMigrationInfo().get().pending().length, greaterThan(0));
getMigrator().migrate();
assertThat(getMigrator().getMigrationInfo().get().pending().length, equalTo(0));
MigrationResult result = getMigrator().migrate();
assertEquals(0, result.changes);
assertEquals(1, result.executedStatements.size());
assertEquals(1, result.succeededTasks.size());
assertEquals(0, result.failedTasks.size());
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), not(hasItem("SOMETABLE")));
}
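
migrate() now returns a MigrationResult instead of the old Flyway MigrationInfoService round-trip, and the assertions above fix its public fields: changes, executedStatements, succeededTasks, failedTasks. A short sketch of consuming it outside a test (the logger name is illustrative):

MigrationResult result = getMigrator().migrate();
ourLog.info("Migration complete: {} schema changes, {} statements, {} tasks succeeded, {} failed",
	result.changes,
	result.executedStatements.size(),
	result.succeededTasks.size(),
	result.failedTasks.size());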

View File

@ -28,7 +28,7 @@ public class ExecuteRawSqlTaskTest extends BaseTest {
.forVersion(VersionEnum.V4_0_0)
.executeRawSql("2001.01", "INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (123, 'abc')");
getMigrator().addTasks(tasks.getTasks(VersionEnum.V0_1, VersionEnum.V4_0_0));
getMigrator().addTasks(tasks.getTaskList(VersionEnum.V0_1, VersionEnum.V4_0_0));
getMigrator().migrate();
List<Map<String, Object>> output = executeQuery("SELECT PID FROM SOMETABLE");
@ -49,7 +49,7 @@ public class ExecuteRawSqlTaskTest extends BaseTest {
.executeRawSql("2001.01", "INSERT INTO SOMETABLE (PID_BAD_COLUMN, TEXTCOL) VALUES (123, 'abc')")
.failureAllowed();
getMigrator().addTasks(tasks.getTasks(VersionEnum.V0_1, VersionEnum.V4_0_0));
getMigrator().addTasks(tasks.getTaskList(VersionEnum.V0_1, VersionEnum.V4_0_0));
getMigrator().migrate();
List<Map<String, Object>> output = executeQuery("SELECT PID FROM SOMETABLE");
@ -70,7 +70,7 @@ public class ExecuteRawSqlTaskTest extends BaseTest {
.executeRawSql("2001.01", "INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (123, 'abc')")
.onlyAppliesToPlatforms(DriverTypeEnum.H2_EMBEDDED);
getMigrator().addTasks(tasks.getTasks(VersionEnum.V0_1, VersionEnum.V4_0_0));
getMigrator().addTasks(tasks.getTaskList(VersionEnum.V0_1, VersionEnum.V4_0_0));
getMigrator().migrate();
List<Map<String, Object>> output = executeQuery("SELECT PID FROM SOMETABLE");
@ -99,7 +99,7 @@ public class ExecuteRawSqlTaskTest extends BaseTest {
.forVersion(VersionEnum.V4_0_0)
.executeRawSql("2001.01", driverToSql);
getMigrator().addTasks(tasks.getTasks(VersionEnum.V0_1, VersionEnum.V4_0_0));
getMigrator().addTasks(tasks.getTaskList(VersionEnum.V0_1, VersionEnum.V4_0_0));
getMigrator().migrate();
List<Map<String, Object>> output = executeQuery("SELECT PID,TEXTCOL FROM SOMETABLE");
@ -128,7 +128,7 @@ public class ExecuteRawSqlTaskTest extends BaseTest {
tasks.forVersion(VersionEnum.V4_0_0)
.executeRawSqlStub("2001.01", "INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (123, 'abc')");
getMigrator().addTasks(tasks.getTasks(VersionEnum.V0_1, VersionEnum.V4_0_0));
getMigrator().addTasks(tasks.getTaskList(VersionEnum.V0_1, VersionEnum.V4_0_0));
getMigrator().migrate();
List<Map<String, Object>> output = executeQuery("SELECT PID,TEXTCOL FROM SOMETABLE");
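
Every call site in this file changes the same way: the builder chain that registers raw-SQL tasks is untouched, and only the hand-off to the migrator moves from getTasks() to getTaskList(). End to end, mirroring the hunks above (MyMigrationTasks stands in for a concrete BaseMigrationTasks subclass, as in the other tests):

MyMigrationTasks tasks = new MyMigrationTasks();
tasks.forVersion(VersionEnum.V4_0_0)
	.executeRawSql("2001.01", "INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (123, 'abc')")
	.onlyAppliesToPlatforms(DriverTypeEnum.H2_EMBEDDED);
getMigrator().addTasks(tasks.getTaskList(VersionEnum.V0_1, VersionEnum.V4_0_0));
getMigrator().migrate();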

View File

@ -1,8 +1,8 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.HapiMigrationException;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.flywaydb.core.internal.command.DbMigrate;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
@ -300,7 +300,7 @@ public class ModifyColumnTest extends BaseTest {
try {
getMigrator().migrate();
fail();
} catch (DbMigrate.FlywayMigrateException e) {
} catch (HapiMigrationException e) {
// expected
}

View File

@ -1,8 +1,8 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.HapiMigrationException;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.flywaydb.core.api.FlywayException;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
@ -166,8 +166,8 @@ public class RenameColumnTaskTest extends BaseTest {
try {
getMigrator().migrate();
fail();
} catch (FlywayException e) {
assertEquals(Msg.code(47) + "Failure executing task \"Drop an index\", aborting! Cause: java.sql.SQLException: "+ Msg.code(54) + "Can not rename SOMETABLE.myTextCol to TEXTCOL because both columns exist and data exists in TEXTCOL", e.getCause().getCause().getMessage());
} catch (HapiMigrationException e) {
assertEquals(Msg.code(47) + "Failure executing task \"Drop an index\", aborting! Cause: java.sql.SQLException: "+ Msg.code(54) + "Can not rename SOMETABLE.myTextCol to TEXTCOL because both columns exist and data exists in TEXTCOL", e.getMessage());
}
}
@ -249,8 +249,8 @@ public class RenameColumnTaskTest extends BaseTest {
try {
getMigrator().migrate();
fail();
} catch (FlywayException e) {
assertEquals(Msg.code(47) + "Failure executing task \"RenameColumnTask\", aborting! Cause: java.sql.SQLException: "+ Msg.code(56) + "Can not rename SOMETABLE.myTextCol to TEXTCOL because neither column exists!", e.getCause().getCause().getMessage());
} catch (HapiMigrationException e) {
assertEquals(Msg.code(47) + "Failure executing task \"RenameColumnTask\", aborting! Cause: java.sql.SQLException: "+ Msg.code(56) + "Can not rename SOMETABLE.myTextCol to TEXTCOL because neither column exists!", e.getMessage());
}
@ -289,8 +289,8 @@ public class RenameColumnTaskTest extends BaseTest {
try {
getMigrator().migrate();
fail();
} catch (FlywayException e) {
assertEquals(Msg.code(47) + "Failure executing task \"RenameColumnTask\", aborting! Cause: java.sql.SQLException: "+ Msg.code(55) + "Can not rename SOMETABLE.PID to PID2 because both columns exist!", e.getCause().getCause().getMessage());
} catch (HapiMigrationException e) {
assertEquals(Msg.code(47) + "Failure executing task \"RenameColumnTask\", aborting! Cause: java.sql.SQLException: "+ Msg.code(55) + "Can not rename SOMETABLE.PID to PID2 because both columns exist!", e.getMessage());
}
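
Same message-flattening as elsewhere in this commit: the Msg.code()-prefixed cause used to sit two levels down Flyway's cause chain (e.getCause().getCause().getMessage()) and now sits on HapiMigrationException directly. The before/after assertion shape, taken from the hunk above:

try {
	getMigrator().migrate();
	fail();
} catch (HapiMigrationException e) {
	// was: e.getCause().getCause().getMessage() under FlywayException
	assertEquals(Msg.code(47) + "Failure executing task \"RenameColumnTask\", aborting! Cause: java.sql.SQLException: " + Msg.code(55) + "Can not rename SOMETABLE.PID to PID2 because both columns exist!", e.getMessage());
}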

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -82,7 +82,7 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-dstu2</artifactId>
<version>5.2.0</version>
<version>${project.version}</version>
<scope>compile</scope>
</dependency>
<dependency>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE11-SNAPSHOT</version>
<version>6.2.0-PRE12-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

Some files were not shown because too many files have changed in this diff.