removed flyway, started coding persistence

This commit is contained in:
Ken Stevens 2022-09-16 20:45:09 -04:00
parent c3339f38c3
commit db3b0f9417
31 changed files with 434 additions and 662 deletions

View File

@ -235,19 +235,6 @@
<artifactId>jansi</artifactId>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-sqlserver</artifactId>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-mysql</artifactId>
</dependency>
<!-- Test Deps -->
<dependency>
<groupId>org.awaitility</groupId>

View File

@ -21,11 +21,9 @@ package ca.uhn.fhir.cli;
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.BaseMigrator;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.FlywayMigrator;
import ca.uhn.fhir.jpa.migrate.HapiMigrator;
import ca.uhn.fhir.jpa.migrate.MigrationTaskSkipper;
import ca.uhn.fhir.jpa.migrate.TaskOnlyMigrator;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
@ -50,7 +48,9 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
public static final String MIGRATE_DATABASE = "migrate-database";
public static final String NO_COLUMN_SHRINK = "no-column-shrink";
// FIXME KHS remove
public static final String DONT_USE_FLYWAY = "dont-use-flyway";
// FIXME KHS remove
public static final String STRICT_ORDER = "strict-order";
public static final String SKIP_VERSIONS = "skip-versions";
private Set<String> myFlags;
@ -119,30 +119,17 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
.filter(StringUtils::isNotBlank)
.collect(Collectors.toSet());
boolean dontUseFlyway = theCommandLine.hasOption(BaseFlywayMigrateDatabaseCommand.DONT_USE_FLYWAY);
boolean strictOrder = theCommandLine.hasOption(BaseFlywayMigrateDatabaseCommand.STRICT_ORDER);
BaseMigrator migrator;
if (dontUseFlyway || dryRun) {
// Flyway dryrun is not available in community edition
migrator = new TaskOnlyMigrator();
} else {
migrator = new FlywayMigrator(myMigrationTableName);
}
DriverTypeEnum.ConnectionProperties connectionProperties = driverType.newConnectionProperties(url, username, password);
HapiMigrator migrator = new HapiMigrator(driverType, connectionProperties.getDataSource(), myMigrationTableName);
migrator.setDataSource(connectionProperties.getDataSource());
migrator.setDriverType(driverType);
migrator.setDryRun(dryRun);
migrator.setNoColumnShrink(noColumnShrink);
migrator.setStrictOrder(strictOrder);
String skipVersions = theCommandLine.getOptionValue(BaseFlywayMigrateDatabaseCommand.SKIP_VERSIONS);
addTasks(migrator, skipVersions);
migrator.migrate();
}
protected abstract void addTasks(BaseMigrator theMigrator, String theSkippedVersions);
protected abstract void addTasks(HapiMigrator theMigrator, String theSkippedVersions);
public void setMigrationTableName(String theMigrationTableName) {
myMigrationTableName = theMigrationTableName;

View File

@ -20,7 +20,7 @@ package ca.uhn.fhir.cli;
* #L%
*/
import ca.uhn.fhir.jpa.migrate.BaseMigrator;
import ca.uhn.fhir.jpa.migrate.HapiMigrator;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
@ -44,7 +44,7 @@ public class HapiFlywayMigrateDatabaseCommand extends BaseFlywayMigrateDatabaseC
}
@Override
protected void addTasks(BaseMigrator theMigrator, String theSkipVersions) {
protected void addTasks(HapiMigrator theMigrator, String theSkipVersions) {
List<BaseTask> tasks = new HapiFhirJpaMigrationTasks(getFlags()).getAllTasks(VersionEnum.values());
super.setDoNothingOnSkippedTasks(tasks, theSkipVersions);
theMigrator.addTasks(tasks);

View File

@ -1,7 +1,6 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.FlywayMigrator;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import org.apache.commons.dbcp2.BasicDataSource;
@ -27,7 +26,7 @@ public abstract class BaseTest {
private static final Logger ourLog = LoggerFactory.getLogger(BaseTest.class);
private BasicDataSource myDataSource;
private String myUrl;
private FlywayMigrator myMigrator;
private HapiMigrator myMigrator;
private DriverTypeEnum.ConnectionProperties myConnectionProperties;
public static Stream<Supplier<TestDatabaseDetails>> data() {
@ -46,7 +45,7 @@ public abstract class BaseTest {
dataSource.setUsername("SA");
dataSource.setPassword("SA");
dataSource.setDriverClassName(DriverTypeEnum.H2_EMBEDDED.getDriverClassName());
FlywayMigrator migrator = new FlywayMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.H2_EMBEDDED);
HapiMigrator migrator = new HapiMigrator(DriverTypeEnum.H2_EMBEDDED, dataSource, SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME);
return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
}
@ -67,7 +66,7 @@ public abstract class BaseTest {
dataSource.setUsername("SA");
dataSource.setPassword("SA");
dataSource.setDriverClassName(DriverTypeEnum.DERBY_EMBEDDED.getDriverClassName());
FlywayMigrator migrator = new FlywayMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.DERBY_EMBEDDED);
HapiMigrator migrator = new HapiMigrator(DriverTypeEnum.DERBY_EMBEDDED, dataSource, SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME);
return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
}
@ -123,7 +122,7 @@ public abstract class BaseTest {
});
}
public FlywayMigrator getMigrator() {
public HapiMigrator getMigrator() {
return myMigrator;
}
@ -143,9 +142,9 @@ public abstract class BaseTest {
private final String myUrl;
private final DriverTypeEnum.ConnectionProperties myConnectionProperties;
private final BasicDataSource myDataSource;
private final FlywayMigrator myMigrator;
private final HapiMigrator myMigrator;
public TestDatabaseDetails(String theUrl, DriverTypeEnum.ConnectionProperties theConnectionProperties, BasicDataSource theDataSource, FlywayMigrator theMigrator) {
public TestDatabaseDetails(String theUrl, DriverTypeEnum.ConnectionProperties theConnectionProperties, BasicDataSource theDataSource, HapiMigrator theMigrator) {
myUrl = theUrl;
myConnectionProperties = theConnectionProperties;
myDataSource = theDataSource;

View File

@ -36,14 +36,14 @@ public class HashTest {
List<BaseTask> tasks1 = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getAllTasks(VersionEnum.values());
Map<String, Integer> hashesByVersion = new HashMap<>();
for (BaseTask task : tasks1) {
String version = task.getFlywayVersion();
String version = task.getMigrationVersion();
assertNull(hashesByVersion.get(version), "Duplicate flyway version " + version + " in " + HapiFhirJpaMigrationTasks.class.getName());
hashesByVersion.put(version, task.hashCode());
}
List<BaseTask> tasks2 = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getAllTasks(VersionEnum.values());
for (BaseTask task : tasks2) {
String version = task.getFlywayVersion();
String version = task.getMigrationVersion();
int origHash = hashesByVersion.get(version);
assertEquals(origHash, task.hashCode(), "Hashes differ for task " + version);
}

View File

@ -68,24 +68,33 @@
<artifactId>annotations</artifactId>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-sqlserver</artifactId>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-mysql</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-commons</artifactId>
<artifactId>spring-data-jpa</artifactId>
</dependency>
<!-- test -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<scope>test</scope>
</dependency>
<!--
These have been added as explicit dependencies
as JDK9 no longer includes them by default
-->
<dependency>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
<version>${jaxb_api_version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>

View File

@ -1,113 +0,0 @@
package ca.uhn.fhir.jpa.migrate;
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.flywaydb.core.api.MigrationVersion;
import org.flywaydb.core.api.migration.Context;
import org.flywaydb.core.api.migration.JavaMigration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.SQLException;
import static org.apache.commons.lang3.StringUtils.isBlank;
public class FlywayMigrationTask implements JavaMigration {
private static final Logger ourLog = LoggerFactory.getLogger(FlywayMigrationTask.class);
private final BaseTask myTask;
private final FlywayMigrator myFlywayMigrator;
private DriverTypeEnum.ConnectionProperties myConnectionProperties;
public FlywayMigrationTask(BaseTask theTask, FlywayMigrator theFlywayMigrator) {
myTask = theTask;
myFlywayMigrator = theFlywayMigrator;
}
@Override
public MigrationVersion getVersion() {
return MigrationVersion.fromVersion(myTask.getFlywayVersion());
}
@Override
public String getDescription() {
return myTask.getDescription();
}
@Override
public Integer getChecksum() {
return myTask.hashCode();
}
@Override
public boolean isUndo() {
return false;
}
@Override
public boolean isBaselineMigration() {
return false;
}
@Override
public boolean canExecuteInTransaction() {
return false;
}
@Override
public void migrate(Context theContext) {
myTask.setDriverType(myFlywayMigrator.getDriverType());
myTask.setDryRun(myFlywayMigrator.isDryRun());
myTask.setNoColumnShrink(myFlywayMigrator.isNoColumnShrink());
myTask.setConnectionProperties(myConnectionProperties);
try {
executeTask();
} catch (SQLException e) {
String description = myTask.getDescription();
if (isBlank(description)) {
description = myTask.getClass().getSimpleName();
}
String prefix = "Failure executing task \"" + description + "\", aborting! Cause: ";
throw new InternalErrorException(Msg.code(47) + prefix + e.toString(), e);
}
}
private void executeTask() throws SQLException {
if (myFlywayMigrator.isSchemaWasInitialized() && !myTask.isRunDuringSchemaInitialization()) {
// Empty schema was initialized, stub out this non-schema-init task since we're starting with a fully migrated schema
myTask.setDoNothing(true);
}
myTask.execute();
if (myTask.initializedSchema()) {
ourLog.info("Empty schema was Initialized. Stubbing out all following migration tasks that are not Schema Initializations.");
myFlywayMigrator.setSchemaWasInitialized(true);
}
myFlywayMigrator.addExecutedStatements(myTask.getExecutedStatements());
}
public void setConnectionProperties(DriverTypeEnum.ConnectionProperties theConnectionProperties) {
myConnectionProperties = theConnectionProperties;
}
}

View File

@ -1,114 +0,0 @@
package ca.uhn.fhir.jpa.migrate;
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.InitializeSchemaTask;
import com.google.common.annotations.VisibleForTesting;
import org.flywaydb.core.Flyway;
import org.flywaydb.core.api.MigrationInfoService;
import org.flywaydb.core.api.callback.Callback;
import org.flywaydb.core.api.migration.JavaMigration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
public class FlywayMigrator extends BaseMigrator {
private static final Logger ourLog = LoggerFactory.getLogger(FlywayMigrator.class);
private final String myMigrationTableName;
private final List<FlywayMigrationTask> myTasks = new ArrayList<>();
public FlywayMigrator(String theMigrationTableName, DataSource theDataSource, DriverTypeEnum theDriverType) {
this(theMigrationTableName);
setDataSource(theDataSource);
setDriverType(theDriverType);
}
public FlywayMigrator(String theMigrationTableName) {
myMigrationTableName = theMigrationTableName;
}
public void addTask(BaseTask theTask) {
myTasks.add(new FlywayMigrationTask(theTask, this));
}
@Override
public void migrate() {
try (DriverTypeEnum.ConnectionProperties connectionProperties = getDriverType().newConnectionProperties(getDataSource())) {
Flyway flyway = initFlyway(connectionProperties);
flyway.repair();
flyway.migrate();
if (isDryRun()) {
StringBuilder statementBuilder = buildExecutedStatementsString();
ourLog.info("SQL that would be executed:\n\n***********************************\n{}***********************************", statementBuilder);
}
} catch (Exception e) {
throw e;
}
}
private Flyway initFlyway(DriverTypeEnum.ConnectionProperties theConnectionProperties) {
Flyway flyway = Flyway.configure()
.table(myMigrationTableName)
.dataSource(theConnectionProperties.getDataSource())
.baselineOnMigrate(true)
// By default, migrations are allowed to be run out of order. You can enforce strict order by setting strictOrder=true.
.outOfOrder(!isStrictOrder())
.javaMigrations(myTasks.toArray(new JavaMigration[0]))
.callbacks(getCallbacks().toArray(new Callback[0]))
.load();
for (FlywayMigrationTask task : myTasks) {
task.setConnectionProperties(theConnectionProperties);
}
return flyway;
}
@Override
public void addTasks(List<BaseTask> theTasks) {
if ("true".equals(System.getProperty("unit_test_mode"))) {
theTasks.stream().filter(task -> task instanceof InitializeSchemaTask).forEach(this::addTask);
} else {
theTasks.forEach(this::addTask);
}
}
@Override
public Optional<MigrationInfoService> getMigrationInfo() {
if (getDriverType() == null) {
return Optional.empty();
}
try (DriverTypeEnum.ConnectionProperties connectionProperties = getDriverType().newConnectionProperties(getDataSource())) {
Flyway flyway = initFlyway(connectionProperties);
return Optional.of(flyway.info());
}
}
@VisibleForTesting
public void removeAllTasksForUnitTest() {
myTasks.clear();
}
}

View File

@ -0,0 +1,4 @@
package ca.uhn.fhir.jpa.migrate;
public class HapiMigrationException extends RuntimeException {
}
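The new exception class above is committed empty, while the updated tests later in this diff catch HapiMigrationException and inspect getMessage() and getCause(). A minimal sketch of the constructors that usage would seem to require (an assumption, not part of this commit):

package ca.uhn.fhir.jpa.migrate;

// Sketch only: constructors the catching tests appear to rely on (assumed, not committed here).
public class HapiMigrationException extends RuntimeException {
	public HapiMigrationException(String theMessage) {
		super(theMessage);
	}

	public HapiMigrationException(String theMessage, Throwable theCause) {
		super(theMessage, theCause);
	}
}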

View File

@ -20,45 +20,45 @@ package ca.uhn.fhir.jpa.migrate;
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.apache.commons.lang3.Validate;
import org.flywaydb.core.api.callback.Callback;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
public abstract class BaseMigrator implements IMigrator {
/**
* This class replaces the Flyway-based {@code FlywayMigrator}. It doesn't use Flyway, but instead just
* executes all tasks.
*/
public class HapiMigrator implements IMigrator {
private static final Logger ourLog = LoggerFactory.getLogger(HapiMigrator.class);
private List<BaseTask> myTasks = new ArrayList<>();
private final List<BaseTask.ExecutedStatement> myExecutedStatements = new ArrayList<>();
private boolean myDryRun;
private boolean myNoColumnShrink;
private boolean mySchemaWasInitialized;
private DriverTypeEnum myDriverType;
private DataSource myDataSource;
private boolean myStrictOrder;
private List<Callback> myCallbacks = Collections.emptyList();
private final DriverTypeEnum myDriverType;
private final DataSource myDataSource;
private final String myMigrationTableName;
@Nonnull
public List<Callback> getCallbacks() {
return myCallbacks;
}
public void setCallbacks(@Nonnull List<Callback> theCallbacks) {
Validate.notNull(theCallbacks);
myCallbacks = theCallbacks;
public HapiMigrator(DriverTypeEnum theDriverType, DataSource theDataSource, String theMigrationTableName) {
myDriverType = theDriverType;
myDataSource = theDataSource;
// FIXME KHS use tablename
myMigrationTableName = theMigrationTableName;
}
public DataSource getDataSource() {
return myDataSource;
}
public void setDataSource(DataSource theDataSource) {
myDataSource = theDataSource;
}
public boolean isDryRun() {
return myDryRun;
}
@ -79,17 +79,6 @@ public abstract class BaseMigrator implements IMigrator {
return myDriverType;
}
public void setDriverType(DriverTypeEnum theDriverType) {
myDriverType = theDriverType;
}
public boolean isStrictOrder() {
return myStrictOrder;
}
public void setStrictOrder(boolean theStrictOrder) {
myStrictOrder = theStrictOrder;
}
public void addExecutedStatements(List theExecutedStatements) {
myExecutedStatements.addAll(theExecutedStatements);
@ -113,12 +102,46 @@ public abstract class BaseMigrator implements IMigrator {
return statementBuilder;
}
public boolean isSchemaWasInitialized() {
return mySchemaWasInitialized;
@Override
public void migrate() {
DriverTypeEnum.ConnectionProperties connectionProperties = getDriverType().newConnectionProperties(getDataSource());
for (BaseTask next : myTasks) {
next.setDriverType(getDriverType());
next.setDryRun(isDryRun());
next.setNoColumnShrink(isNoColumnShrink());
next.setConnectionProperties(connectionProperties);
try {
if (isDryRun()) {
ourLog.info("Dry run {} {}", next.getMigrationVersion(), next.getDescription());
} else {
ourLog.info("Executing {} {}", next.getMigrationVersion(), next.getDescription());
}
next.execute();
addExecutedStatements(next.getExecutedStatements());
} catch (SQLException e) {
throw new InternalErrorException(Msg.code(48) + e);
}
}
if (isDryRun()) {
StringBuilder statementBuilder = buildExecutedStatementsString();
ourLog.info("SQL that would be executed:\n\n***********************************\n{}***********************************", statementBuilder);
}
}
public BaseMigrator setSchemaWasInitialized(boolean theSchemaWasInitialized) {
mySchemaWasInitialized = theSchemaWasInitialized;
return this;
@Override
public void addTasks(List<BaseTask> theMigrationTasks) {
myTasks.addAll(theMigrationTasks);
}
@Override
public void addTask(BaseTask theTask) {
myTasks.add(theTask);
}
@VisibleForTesting
public void removeAllTasksForUnitTest() {
myTasks.clear();
}
}
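For orientation, the new migrator is driven roughly like this (a minimal sketch assembled from the constructor above and the updated BaseTest/CLI call sites; the in-memory H2 settings are just the ones the tests use):

BasicDataSource dataSource = new BasicDataSource();
dataSource.setUrl("jdbc:h2:mem:example"); // placeholder in-memory database
dataSource.setUsername("SA");
dataSource.setPassword("SA");
dataSource.setDriverClassName(DriverTypeEnum.H2_EMBEDDED.getDriverClassName());

HapiMigrator migrator = new HapiMigrator(DriverTypeEnum.H2_EMBEDDED, dataSource, SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME);
migrator.addTasks(new HapiFhirJpaMigrationTasks(Collections.emptySet()).getAllTasks(VersionEnum.values()));
migrator.migrate();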

View File

@ -21,16 +21,14 @@ package ca.uhn.fhir.jpa.migrate;
*/
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.flywaydb.core.api.MigrationInfoService;
import java.util.List;
import java.util.Optional;
public interface IMigrator {
void migrate();
Optional<MigrationInfoService> getMigrationInfo();
void addTasks(List<BaseTask> theMigrationTasks);
void addTask(BaseTask theTask);
}

View File

@ -48,8 +48,8 @@ public class MigrationTaskSkipper {
.collect(Collectors.toSet());
for (BaseTask task : theTasks) {
if (skippedVersionSet.contains(task.getFlywayVersion())) {
ourLog.info("Will skip {}: {}", task.getFlywayVersion(), task.getDescription());
if (skippedVersionSet.contains(task.getMigrationVersion())) {
ourLog.info("Will skip {}: {}", task.getMigrationVersion(), task.getDescription());
task.setDoNothing(true);
}
}

View File

@ -20,23 +20,17 @@ package ca.uhn.fhir.jpa.migrate;
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.flywaydb.core.api.MigrationInfo;
import org.flywaydb.core.api.MigrationInfoService;
import org.flywaydb.core.api.callback.Callback;
import org.hibernate.cfg.AvailableSettings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.Assert;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.Properties;
public class SchemaMigrator {
@ -47,10 +41,8 @@ public class SchemaMigrator {
private final boolean mySkipValidation;
private final String myMigrationTableName;
private final List<BaseTask> myMigrationTasks;
private boolean myDontUseFlyway;
private boolean myStrictOrder;
private DriverTypeEnum myDriverType;
private List<Callback> myCallbacks = Collections.emptyList();
/**
* Constructor
@ -64,15 +56,6 @@ public class SchemaMigrator {
mySkipValidation = jpaProperties.containsKey(AvailableSettings.HBM2DDL_AUTO) && "update".equals(jpaProperties.getProperty(AvailableSettings.HBM2DDL_AUTO));
}
public void setCallbacks(List<Callback> theCallbacks) {
Assert.notNull(theCallbacks);
myCallbacks = theCallbacks;
}
public void setDontUseFlyway(boolean theDontUseFlyway) {
myDontUseFlyway = theDontUseFlyway;
}
public void setStrictOrder(boolean theStrictOrder) {
myStrictOrder = theStrictOrder;
}
@ -83,17 +66,17 @@ public class SchemaMigrator {
return;
}
try (Connection connection = myDataSource.getConnection()) {
Optional<MigrationInfoService> migrationInfo = newMigrator().getMigrationInfo();
if (migrationInfo.isPresent()) {
if (migrationInfo.get().pending().length > 0) {
String url = connection.getMetaData().getURL();
throw new ConfigurationException(Msg.code(27) + "The database schema for " + url + " is out of date. " +
"Current database schema version is " + getCurrentVersion(migrationInfo.get()) + ". Schema version required by application is " +
getLastVersion(migrationInfo.get()) + ". Please run the database migrator.");
}
ourLog.info("Database schema confirmed at expected version " + getCurrentVersion(migrationInfo.get()));
}
// FIXME KHS
// if (migrationInfo.isPresent()) {
// if (migrationInfo.get().pending().length > 0) {
//
// String url = connection.getMetaData().getURL();
// throw new ConfigurationException(Msg.code(27) + "The database schema for " + url + " is out of date. " +
// "Current database schema version is " + getCurrentVersion(migrationInfo.get()) + ". Schema version required by application is " +
// getLastVersion(migrationInfo.get()) + ". Please run the database migrator.");
// }
// ourLog.info("Database schema confirmed at expected version " + getCurrentVersion(migrationInfo.get()));
// }
} catch (SQLException e) {
throw new ConfigurationException(Msg.code(28) + "Unable to connect to " + myDataSource, e);
}
@ -114,37 +97,13 @@ public class SchemaMigrator {
}
}
private BaseMigrator newMigrator() {
BaseMigrator migrator;
if (myDontUseFlyway) {
migrator = new TaskOnlyMigrator();
migrator.setDriverType(myDriverType);
migrator.setDataSource(myDataSource);
} else {
migrator = new FlywayMigrator(myMigrationTableName, myDataSource, myDriverType);
migrator.setStrictOrder(myStrictOrder);
}
private HapiMigrator newMigrator() {
HapiMigrator migrator;
migrator = new HapiMigrator(myDriverType, myDataSource, myMigrationTableName);
migrator.addTasks(myMigrationTasks);
migrator.setCallbacks(myCallbacks);
return migrator;
}
private String getCurrentVersion(MigrationInfoService theMigrationInfo) {
MigrationInfo migrationInfo = theMigrationInfo.current();
if (migrationInfo == null) {
return "unknown";
}
return migrationInfo.getVersion().toString();
}
private String getLastVersion(MigrationInfoService theMigrationInfo) {
MigrationInfo[] pending = theMigrationInfo.pending();
if (pending.length > 0) {
return pending[pending.length - 1].getVersion().toString();
}
return "unknown";
}
public void setDriverType(DriverTypeEnum theDriverType) {
myDriverType = theDriverType;
}

View File

@ -1,81 +0,0 @@
package ca.uhn.fhir.jpa.migrate;
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.flywaydb.core.api.MigrationInfoService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
/**
* This class is an alternative to {@link FlywayMigrator). It doesn't use Flyway, but instead just
* executes all tasks.
*/
public class TaskOnlyMigrator extends BaseMigrator {
private static final Logger ourLog = LoggerFactory.getLogger(TaskOnlyMigrator.class);
private List<BaseTask> myTasks = new ArrayList<>();
@Override
public void migrate() {
DriverTypeEnum.ConnectionProperties connectionProperties = getDriverType().newConnectionProperties(getDataSource());
for (BaseTask next : myTasks) {
next.setDriverType(getDriverType());
next.setDryRun(isDryRun());
next.setNoColumnShrink(isNoColumnShrink());
next.setConnectionProperties(connectionProperties);
try {
if (isDryRun()) {
ourLog.info("Dry run {} {}", next.getFlywayVersion(), next.getDescription());
} else {
ourLog.info("Executing {} {}", next.getFlywayVersion(), next.getDescription());
}
next.execute();
addExecutedStatements(next.getExecutedStatements());
} catch (SQLException e) {
throw new InternalErrorException(Msg.code(48) + e);
}
}
if (isDryRun()) {
StringBuilder statementBuilder = buildExecutedStatementsString();
ourLog.info("SQL that would be executed:\n\n***********************************\n{}***********************************", statementBuilder);
}
}
@Override
public Optional<MigrationInfoService> getMigrationInfo() {
return Optional.empty();
}
@Override
public void addTasks(List<BaseTask> theMigrationTasks) {
myTasks.addAll(theMigrationTasks);
}
}

View File

@ -0,0 +1,9 @@
package ca.uhn.fhir.jpa.migrate.dao;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
@Repository
public interface HapiMigrationDao extends JpaRepository<HapiMigrationEntity, Integer> {
}
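This repository is not yet invoked from HapiMigrator in this commit (note the FIXME about the migration table name above). Purely as an assumption about where the persistence work is heading, recording an executed task could look something like the following; every setter maps to a column of the HapiMigrationEntity defined in the next file, and the "JDBC" type and the timing variable are placeholders:

// Hypothetical wiring, not part of this commit.
HapiMigrationEntity entity = new HapiMigrationEntity();
entity.setVersion(theTask.getMigrationVersion());
entity.setDescription(theTask.getDescription());
entity.setChecksum(theTask.hashCode()); // same value the old Flyway checksum used
entity.setType("JDBC"); // placeholder migration type
entity.setInstalledBy(System.getProperty("user.name"));
entity.setInstalledOn(new Date());
entity.setExecutionTime((int) executionMillis); // assumed to be measured around theTask.execute()
entity.setSuccess(true);
myHapiMigrationDao.save(entity);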

View File

@ -0,0 +1,144 @@
package ca.uhn.fhir.jpa.migrate.entity;
import org.springframework.data.domain.Persistable;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import java.util.Date;
@Entity
@Table(name = "FLY_HFJ_MIGRATION")
public class HapiMigrationEntity implements Persistable<Integer> {
private static final int VERSION_MAX_SIZE = 50;
private static final int DESCRIPTION_MAX_SIZE = 200;
private static final int TYPE_MAX_SIZE = 20;
private static final int SCRIPT_MAX_SIZE = 1000;
private static final int INSTALLED_BY_MAX_SIZE = 100;
@Id
@SequenceGenerator(name = "SEQ_FLY_HFJ_MIGRATION", sequenceName = "SEQ_FLY_HFJ_MIGRATION")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_FLY_HFJ_MIGRATION")
@Column(name = "INSTALLED_RANK")
private Integer myPid;
@Column(name = "VERSION", length = VERSION_MAX_SIZE)
private String myVersion;
@Column(name = "DESCRIPTION", length = DESCRIPTION_MAX_SIZE)
private String myDescription;
@Column(name = "TYPE", length = TYPE_MAX_SIZE)
private String myType;
@Column(name = "SCRIPT", length = SCRIPT_MAX_SIZE)
private String myScript;
@Column(name = "CHECKSUM")
private Integer myChecksum;
@Column(name = "INSTALLED_BY", length = INSTALLED_BY_MAX_SIZE)
private String myInstalledBy;
@Column(name = "INSTALLED_ON")
private Date myInstalledOn;
@Column(name = "EXECUTION_TIME")
private Integer myExecutionTime;
@Column(name = "SUCCESS")
private Boolean mySuccess;
@Override
public Integer getId() {
return myPid;
}
@Override
public boolean isNew() {
return null == getId();
}
public Integer getPid() {
return myPid;
}
public void setPid(Integer thePid) {
myPid = thePid;
}
public String getVersion() {
return myVersion;
}
public void setVersion(String theVersion) {
myVersion = theVersion;
}
public String getDescription() {
return myDescription;
}
public void setDescription(String theDescription) {
myDescription = theDescription;
}
public String getType() {
return myType;
}
public void setType(String theType) {
myType = theType;
}
public String getScript() {
return myScript;
}
public void setScript(String theScript) {
myScript = theScript;
}
public Integer getChecksum() {
return myChecksum;
}
public void setChecksum(Integer theChecksum) {
myChecksum = theChecksum;
}
public String getInstalledBy() {
return myInstalledBy;
}
public void setInstalledBy(String theInstalledBy) {
myInstalledBy = theInstalledBy;
}
public Date getInstalledOn() {
return myInstalledOn;
}
public void setInstalledOn(Date theInstalledOn) {
myInstalledOn = theInstalledOn;
}
public Integer getExecutionTime() {
return myExecutionTime;
}
public void setExecutionTime(Integer theExecutionTime) {
myExecutionTime = theExecutionTime;
}
public Boolean getSuccess() {
return mySuccess;
}
public void setSuccess(Boolean theSuccess) {
mySuccess = theSuccess;
}
}

View File

@ -172,11 +172,11 @@ public abstract class BaseTask {
return changesCount;
} catch (DataAccessException e) {
if (myFailureAllowed) {
ourLog.info("Task {} did not exit successfully, but task is allowed to fail", getFlywayVersion());
ourLog.info("Task {} did not exit successfully, but task is allowed to fail", getMigrationVersion());
ourLog.debug("Error was: {}", e.getMessage(), e);
return 0;
} else {
throw new DataAccessException(Msg.code(61) + "Failed during task " + getFlywayVersion() + ": " + e, e) {
throw new DataAccessException(Msg.code(61) + "Failed during task " + getMigrationVersion() + ": " + e, e) {
private static final long serialVersionUID = 8211678931579252166L;
};
}
@ -245,7 +245,7 @@ public abstract class BaseTask {
myFailureAllowed = theFailureAllowed;
}
public String getFlywayVersion() {
public String getMigrationVersion() {
String releasePart = myProductVersion;
if (releasePart.startsWith("V")) {
releasePart = releasePart.substring(1);
@ -254,7 +254,7 @@ public abstract class BaseTask {
}
protected void logInfo(Logger theLog, String theFormattedMessage, Object... theArguments) {
theLog.info(getFlywayVersion() + ": " + theFormattedMessage, theArguments);
theLog.info(getMigrationVersion() + ": " + theFormattedMessage, theArguments);
}
public void validateVersion() {

View File

@ -26,7 +26,6 @@ import com.google.common.collect.Multimap;
import com.google.common.collect.MultimapBuilder;
import org.apache.commons.lang3.EnumUtils;
import org.apache.commons.lang3.Validate;
import org.flywaydb.core.api.MigrationVersion;
import javax.annotation.Nonnull;
import java.util.ArrayList;
@ -89,7 +88,7 @@ public class BaseMigrationTasks<T extends Enum> {
protected BaseTask getTaskWithVersion(String theFlywayVersion) {
return myTasks.values().stream()
.filter(task -> theFlywayVersion.equals(task.getFlywayVersion()))
.filter(task -> theFlywayVersion.equals(task.getMigrationVersion()))
.findFirst()
.get();
}
@ -97,7 +96,7 @@ public class BaseMigrationTasks<T extends Enum> {
void validate(Collection<BaseTask> theTasks) {
for (BaseTask task : theTasks) {
task.validateVersion();
String version = task.getFlywayVersion();
String version = task.getMigrationVersion();
MigrationVersion migrationVersion = MigrationVersion.fromVersion(version);
if (lastVersion != null) {
if (migrationVersion.compareTo(lastVersion) <= 0) {

View File

@ -0,0 +1,12 @@
package ca.uhn.fhir.jpa.migrate.tasks.api;
// FIXME KHS replace with flyway core library
public class MigrationVersion {
public static MigrationVersion fromVersion(String theVersion) {
return null;
}
public int compareTo(MigrationVersion theLastVersion) {
return 0;
}
}
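The stub above always returns null and 0, which effectively disables the version-ordering check in BaseMigrationTasks.validate() further down. Until the FIXME is resolved, a plain numeric comparison of the dot-separated parts would behave like the Flyway class it replaces; this is a sketch under that assumption, not the committed code:

public class MigrationVersion implements Comparable<MigrationVersion> {
	private final int[] myParts;

	private MigrationVersion(int[] theParts) {
		myParts = theParts;
	}

	public static MigrationVersion fromVersion(String theVersion) {
		// e.g. "5.4.0.20220916.1" -> [5, 4, 0, 20220916, 1]
		String[] tokens = theVersion.split("\\.");
		int[] parts = new int[tokens.length];
		for (int i = 0; i < tokens.length; i++) {
			parts[i] = Integer.parseInt(tokens[i]);
		}
		return new MigrationVersion(parts);
	}

	@Override
	public int compareTo(MigrationVersion theOther) {
		int length = Math.max(myParts.length, theOther.myParts.length);
		for (int i = 0; i < length; i++) {
			int left = i < myParts.length ? myParts[i] : 0;
			int right = i < theOther.myParts.length ? theOther.myParts[i] : 0;
			if (left != right) {
				return Integer.compare(left, right);
			}
		}
		return 0;
	}
}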

View File

@ -1,31 +0,0 @@
import org.flywaydb.core.internal.database.DatabaseType;
import org.flywaydb.core.internal.database.DatabaseTypeRegister;
import org.junit.jupiter.api.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
public class FlywayDbCompatibilityTest {
@Test
public void testFlywayDbCompatibility() {
DatabaseType h2Type = DatabaseTypeRegister.getDatabaseTypeForUrl("jdbc:h2:mem:test");
assertThat(h2Type.getName(), is(equalTo("H2")));
DatabaseType sqlServerType = DatabaseTypeRegister.getDatabaseTypeForUrl("jdbc:sqlserver://localhost:1433;database=test");
assertThat(sqlServerType.getName(), is(equalTo("Azure Synapse")));
DatabaseType oracleType = DatabaseTypeRegister.getDatabaseTypeForUrl("jdbc:oracle:thin:@//host:port/service");
assertThat(oracleType.getName(), is(equalTo("Oracle")));
DatabaseType mySqlType = DatabaseTypeRegister.getDatabaseTypeForUrl("jdbc:mysql://localhost:3306/cdr?serverTimezone=Canada/Eastern");
assertThat(mySqlType.getName(), is(equalTo("MySQL")));
DatabaseType postgresType = DatabaseTypeRegister.getDatabaseTypeForUrl("jdbc:postgresql://localhost:5432/cdr");
assertThat(postgresType.getName(), is(equalTo("CockroachDB")));
DatabaseType mariaDbType = DatabaseTypeRegister.getDatabaseTypeForUrl("jdbc:mariadb://localhost:3306/cdr");
assertThat(mariaDbType.getName(), is(equalTo("MariaDB")));
}
}

View File

@ -1,120 +0,0 @@
package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.InitializeSchemaTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.ISchemaInitializationProvider;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.flywaydb.core.api.migration.Context;
import org.flywaydb.core.internal.database.DatabaseTypeRegister;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.sql.SQLException;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
class FlywayMigrationTaskTest {
TestTask myTestTask = new TestTask();
@Mock
private FlywayMigrator myFlywayMigrator;
@Mock
private Context myContext;
@Test
public void schemaInitializedStubsFollowingMigration() {
when(myFlywayMigrator.isSchemaWasInitialized()).thenReturn(true);
FlywayMigrationTask task = new FlywayMigrationTask(myTestTask, myFlywayMigrator);
task.migrate(myContext);
assertTrue(myTestTask.isDoNothing());
}
@Test
public void schemaNotInitializedStubsFollowingMigration() {
when(myFlywayMigrator.isSchemaWasInitialized()).thenReturn(false);
FlywayMigrationTask task = new FlywayMigrationTask(myTestTask, myFlywayMigrator);
task.migrate(myContext);
assertFalse(myTestTask.isDoNothing());
}
@Test
public void schemaInitializedStubsFollowingMigrationExceptInitSchemaTask() {
when(myFlywayMigrator.isSchemaWasInitialized()).thenReturn(true);
InitializeSchemaTask initSchemaTask = new TestInitializeSchemaTask(false);
FlywayMigrationTask task = new FlywayMigrationTask(initSchemaTask, myFlywayMigrator);
task.migrate(myContext);
assertFalse(myTestTask.isDoNothing());
}
@Test
public void schemaInitializedSetsInitializedFlag() {
InitializeSchemaTask initSchemaTask = new TestInitializeSchemaTask(true);
FlywayMigrationTask task = new FlywayMigrationTask(initSchemaTask, myFlywayMigrator);
task.migrate(myContext);
verify(myFlywayMigrator, times(1)).setSchemaWasInitialized(true);
}
@Test
public void nonInitSchemaInitializedSetsInitializedFlag() {
InitializeSchemaTask initSchemaTask = new TestInitializeSchemaTask(false);
FlywayMigrationTask task = new FlywayMigrationTask(initSchemaTask, myFlywayMigrator);
task.migrate(myContext);
verify(myFlywayMigrator, never()).setSchemaWasInitialized(true);
}
// Can't use @Mock since BaseTask.equals is final
private class TestTask extends BaseTask {
protected TestTask() {
super("1", "1");
}
@Override
public void validate() {
// do nothing
}
@Override
protected void doExecute() throws SQLException {
// do nothing
}
@Override
protected void generateHashCode(HashCodeBuilder theBuilder) {
// do nothing
}
@Override
protected void generateEquals(EqualsBuilder theBuilder, BaseTask theOtherObject) {
// do nothing
}
}
private class TestInitializeSchemaTask extends InitializeSchemaTask {
private final boolean myInitializedSchema;
public TestInitializeSchemaTask(boolean theInitializedSchema) {
super("1", "1", mock(ISchemaInitializationProvider.class));
myInitializedSchema = theInitializedSchema;
}
@Override
public void execute() throws SQLException {
// nothing
}
@Override
public boolean initializedSchema() {
return myInitializedSchema;
}
}
}

View File

@ -104,7 +104,7 @@ public class MigrationTaskSkipperTest {
private void assertSkipped(List<BaseTask> theTasks, Integer... theVersions) {
Set<String> expectedVersions = integersToVersions(theVersions).collect(Collectors.toSet());
Set<String> taskVersions = theTasks.stream().filter(BaseTask::isDoNothing).map(BaseTask::getFlywayVersion).collect(Collectors.toSet());
Set<String> taskVersions = theTasks.stream().filter(BaseTask::isDoNothing).map(BaseTask::getMigrationVersion).collect(Collectors.toSet());
assertThat(taskVersions, equalTo(expectedVersions));
}

View File

@ -7,7 +7,6 @@ import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTest;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.flywaydb.core.api.FlywayException;
import org.hamcrest.Matchers;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
@ -59,7 +58,7 @@ public class SchemaMigratorTest extends BaseTest {
try {
schemaMigrator.migrate();
fail();
} catch (FlywayException e) {
} catch (HapiMigrationException e) {
assertEquals(org.springframework.jdbc.BadSqlGrammarException.class, e.getCause().getCause().getClass());
}
schemaMigrator = createTableMigrator();
@ -82,7 +81,7 @@ public class SchemaMigratorTest extends BaseTest {
try {
schemaMigrator.migrate();
fail();
} catch (FlywayException e) {
} catch (HapiMigrationException e) {
assertThat(e.getMessage(), containsString("Detected resolved migration not applied to database: 1.1"));
}
schemaMigrator.setStrictOrder(false);
@ -129,7 +128,6 @@ public class SchemaMigratorTest extends BaseTest {
SchemaMigrator schemaMigrator = createTableMigrator();
schemaMigrator.setDriverType(getDriverType());
schemaMigrator.setDontUseFlyway(true);
// Validate shouldn't fail if we aren't using Flyway
schemaMigrator.validate();

View File

@ -0,0 +1,67 @@
package ca.uhn.fhir.jpa.migrate.config;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.sql.DataSource;
import java.util.Properties;
@EnableJpaRepositories(basePackages = {"ca.uhn.fhir.jpa.migrate.dao"})
@EnableTransactionManagement
public class TestMigrationConfig {
private static final Logger ourLog = LoggerFactory.getLogger(TestMigrationConfig.class);
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory(DataSource theDataSource) {
LocalContainerEntityManagerFactoryBean retVal = new LocalContainerEntityManagerFactoryBean();
retVal.setPackagesToScan("ca.uhn.fhir.jpa.migrate.entity");
retVal.setPersistenceProvider(new HibernatePersistenceProvider());
retVal.setPersistenceUnitName("PU_Migration");
retVal.setDataSource(theDataSource);
retVal.setJpaProperties(jpaProperties());
return retVal;
}
@Bean
public DataSource dataSource() {
BasicDataSource retVal = new BasicDataSource();
retVal.setDriver(new org.h2.Driver());
retVal.setUrl("jdbc:h2:mem:test_migration");
retVal.setMaxWaitMillis(30000);
retVal.setUsername("");
retVal.setPassword("");
retVal.setMaxTotal(5);
return retVal;
}
private Properties jpaProperties() {
Properties extraProperties = new Properties();
extraProperties.put("hibernate.search.enabled", "false");
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
ourLog.info("jpaProperties: {}", extraProperties);
return extraProperties;
}
@Bean
public PlatformTransactionManager transactionManager(LocalContainerEntityManagerFactoryBean theEntityManagerFactory) {
JpaTransactionManager retVal = new JpaTransactionManager();
retVal.setEntityManagerFactory(theEntityManagerFactory.getObject());
return retVal;
}
}

View File

@ -0,0 +1,52 @@
package ca.uhn.fhir.jpa.migrate.dao;
import ca.uhn.fhir.jpa.migrate.config.TestMigrationConfig;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.jupiter.api.Assertions.assertEquals;
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {TestMigrationConfig.class})
class HapiMigrationDaoTest {
@Autowired
HapiMigrationDao myHapiMigrationDao;
@AfterEach
void after() {
myHapiMigrationDao.deleteAll();
}
@Test
public void findAll_empty_returnsNothing() {
List<HapiMigrationEntity> result = myHapiMigrationDao.findAll();
assertThat(result, hasSize(0));
}
@Test
public void findAll_2records_returnsBoth() {
HapiMigrationEntity record1 = new HapiMigrationEntity();
String desc1 = "DESC1";
record1.setDescription(desc1);
HapiMigrationEntity result1 = myHapiMigrationDao.save(record1);
assertEquals(1, result1.getId());
HapiMigrationEntity record2 = new HapiMigrationEntity();
String desc2 = "DESC2";
record2.setDescription(desc2);
HapiMigrationEntity result2 = myHapiMigrationDao.save(record2);
assertEquals(2, result2.getId());
List<HapiMigrationEntity> all = myHapiMigrationDao.findAll();
assertThat(all, hasSize(2));
}
}

View File

@ -1,9 +1,9 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.HapiMigrationException;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;
import org.flywaydb.core.internal.command.DbMigrate;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
@ -91,7 +91,7 @@ public class AddColumnTest extends BaseTest {
try {
getMigrator().migrate();
fail();
} catch (DbMigrate.FlywayMigrateException e) {
} catch (HapiMigrationException e) {
assertEquals("Migration failed !", e.getMessage());
}
}

View File

@ -1,7 +1,7 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.FlywayMigrator;
import ca.uhn.fhir.jpa.migrate.HapiMigrator;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import org.apache.commons.dbcp2.BasicDataSource;
@ -27,7 +27,7 @@ public abstract class BaseTest {
private static int ourDatabaseUrl = 0;
private BasicDataSource myDataSource;
private String myUrl;
private FlywayMigrator myMigrator;
private HapiMigrator myMigrator;
private DriverTypeEnum.ConnectionProperties myConnectionProperties;
public static Stream<Supplier<TestDatabaseDetails>> data() {
@ -46,7 +46,7 @@ public abstract class BaseTest {
dataSource.setUsername("SA");
dataSource.setPassword("SA");
dataSource.setDriverClassName(DriverTypeEnum.H2_EMBEDDED.getDriverClassName());
FlywayMigrator migrator = new FlywayMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.H2_EMBEDDED);
HapiMigrator migrator = new HapiMigrator(DriverTypeEnum.H2_EMBEDDED, dataSource, SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME);
return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
}
@ -67,7 +67,7 @@ public abstract class BaseTest {
dataSource.setUsername("SA");
dataSource.setPassword("SA");
dataSource.setDriverClassName(DriverTypeEnum.DERBY_EMBEDDED.getDriverClassName());
FlywayMigrator migrator = new FlywayMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.DERBY_EMBEDDED);
HapiMigrator migrator = new HapiMigrator(DriverTypeEnum.DERBY_EMBEDDED, dataSource, SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME);
return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
}
@ -123,7 +123,7 @@ public abstract class BaseTest {
});
}
public FlywayMigrator getMigrator() {
public HapiMigrator getMigrator() {
return myMigrator;
}
@ -143,9 +143,9 @@ public abstract class BaseTest {
private final String myUrl;
private final DriverTypeEnum.ConnectionProperties myConnectionProperties;
private final BasicDataSource myDataSource;
private final FlywayMigrator myMigrator;
private final HapiMigrator myMigrator;
public TestDatabaseDetails(String theUrl, DriverTypeEnum.ConnectionProperties theConnectionProperties, BasicDataSource theDataSource, FlywayMigrator theMigrator) {
public TestDatabaseDetails(String theUrl, DriverTypeEnum.ConnectionProperties theConnectionProperties, BasicDataSource theDataSource, HapiMigrator theMigrator) {
myUrl = theUrl;
myConnectionProperties = theConnectionProperties;
myDataSource = theDataSource;

View File

@ -8,8 +8,6 @@ import java.sql.SQLException;
import java.util.function.Supplier;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.core.IsNot.not;
@ -83,9 +81,10 @@ public class DropTableTest extends BaseTest {
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), (hasItems("SOMETABLE")));
assertThat(getMigrator().getMigrationInfo().get().pending().length, greaterThan(0));
// FIXME KHS replace with dao
// assertThat(getMigrator().getMigrationInfo().get().pending().length, greaterThan(0));
getMigrator().migrate();
assertThat(getMigrator().getMigrationInfo().get().pending().length, equalTo(0));
// assertThat(getMigrator().getMigrationInfo().get().pending().length, equalTo(0));
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), not(hasItems("SOMETABLE")));
}

View File

@ -1,8 +1,8 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.HapiMigrationException;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.flywaydb.core.internal.command.DbMigrate;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
@ -300,7 +300,7 @@ public class ModifyColumnTest extends BaseTest {
try {
getMigrator().migrate();
fail();
} catch (DbMigrate.FlywayMigrateException e) {
} catch (HapiMigrationException e) {
// expected
}

View File

@ -1,8 +1,8 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.HapiMigrationException;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.flywaydb.core.api.FlywayException;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
@ -166,7 +166,8 @@ public class RenameColumnTaskTest extends BaseTest {
try {
getMigrator().migrate();
fail();
} catch (FlywayException e) {
} catch (HapiMigrationException e) {
// FIXME KHS get this to pass
assertEquals(Msg.code(47) + "Failure executing task \"Drop an index\", aborting! Cause: java.sql.SQLException: "+ Msg.code(54) + "Can not rename SOMETABLE.myTextCol to TEXTCOL because both columns exist and data exists in TEXTCOL", e.getCause().getCause().getMessage());
}
@ -249,7 +250,7 @@ public class RenameColumnTaskTest extends BaseTest {
try {
getMigrator().migrate();
fail();
} catch (FlywayException e) {
} catch (HapiMigrationException e) {
assertEquals(Msg.code(47) + "Failure executing task \"RenameColumnTask\", aborting! Cause: java.sql.SQLException: "+ Msg.code(56) + "Can not rename SOMETABLE.myTextCol to TEXTCOL because neither column exists!", e.getCause().getCause().getMessage());
}
@ -289,7 +290,7 @@ public class RenameColumnTaskTest extends BaseTest {
try {
getMigrator().migrate();
fail();
} catch (FlywayException e) {
} catch (HapiMigrationException e) {
assertEquals(Msg.code(47) + "Failure executing task \"RenameColumnTask\", aborting! Cause: java.sql.SQLException: "+ Msg.code(55) + "Can not rename SOMETABLE.PID to PID2 because both columns exist!", e.getCause().getCause().getMessage());
}

pom.xml
View File

@ -813,7 +813,6 @@
<jsr305_version>3.0.2</jsr305_version>
<junit_version>5.8.2</junit_version>
<flexmark_version>0.50.40</flexmark_version>
<flyway_version>8.5.0</flyway_version>
<hibernate_version>5.6.2.Final</hibernate_version>
<hibernate_search_version>6.1.6.Final</hibernate_search_version>
<!-- Update lucene version when you update hibernate-search version -->
@ -1958,21 +1957,6 @@
<artifactId>xpp3_xpath</artifactId>
<version>1.1.4c</version>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId>
<version>${flyway_version}</version>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-sqlserver</artifactId>
<version>${flyway_version}</version>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-mysql</artifactId>
<version>${flyway_version}</version>
</dependency>
<dependency>
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-test</artifactId>