mirror of https://github.com/hapifhir/hapi-fhir.git
synced 2025-02-18 02:45:07 +00:00

started wiring up new dao service

This commit is contained in:
parent db3b0f9417
commit 2f02546d94
(module pom.xml)
@@ -67,7 +67,10 @@
		<groupId>org.jetbrains</groupId>
		<artifactId>annotations</artifactId>
	</dependency>

	<dependency>
		<groupId>org.flywaydb</groupId>
		<artifactId>flyway-core</artifactId>
	</dependency>
	<dependency>
		<groupId>org.springframework.data</groupId>
		<artifactId>spring-data-jpa</artifactId>
HapiMigrationStorageSvc.java (new file)
@@ -0,0 +1,43 @@
package ca.uhn.fhir.jpa.migrate;

import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.flywaydb.core.api.MigrationVersion;

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class HapiMigrationStorageSvc {
    private final HapiMigrationDao myHapiMigrationDao;

    public HapiMigrationStorageSvc(HapiMigrationDao theHapiMigrationDao) {
        myHapiMigrationDao = theHapiMigrationDao;
    }

    public List<HapiMigrationEntity> fetchAppliedMigrations() {
        return myHapiMigrationDao.findAll();
    }

    public List<BaseTask> diff(List<BaseTask> theTasks) {
        Set<MigrationVersion> appliedMigrationVersions = fetchAppliedMigrationVersions();

        return theTasks.stream()
            .filter(task -> !appliedMigrationVersions.contains(MigrationVersion.fromVersion(task.getMigrationVersion())))
            .collect(Collectors.toList());
    }

    private Set<MigrationVersion> fetchAppliedMigrationVersions() {
        return myHapiMigrationDao.fetchMigrationVersions().stream().map(MigrationVersion::fromVersion).collect(Collectors.toSet());
    }

    // FIXME KHS test
    public String getLatestAppliedVersion() {
        return fetchAppliedMigrationVersions().stream()
            .sorted()
            .map(MigrationVersion::toString)
            .reduce((first, second) -> second)
            .orElse("unknown");
    }
}
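The commit carries no prose, so as a reading aid: a minimal, self-contained sketch (not part of the commit) of the set-difference idea behind diff(). Applied versions go into a Set of Flyway MigrationVersion objects, and any candidate whose parsed version is already in the set is filtered out. The class name is made up; the version strings are borrowed from the tests later in this diff.

import org.flywaydb.core.api.MigrationVersion;

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class DiffSketch {
    public static void main(String[] args) {
        // Versions already recorded in the migration table
        Set<MigrationVersion> applied = Set.of(
            MigrationVersion.fromVersion("5_5_0.20210720.3"),
            MigrationVersion.fromVersion("5_5_0.20210722.1"));

        List<String> candidates = List.of(
            "5_5_0.20210720.3",  // already applied -> filtered out
            "5_5_0.20210722.4"); // new -> survives the diff

        // Same filter the service applies to its BaseTask list
        List<String> unapplied = candidates.stream()
            .filter(v -> !applied.contains(MigrationVersion.fromVersion(v)))
            .collect(Collectors.toList());

        System.out.println(unapplied); // [5_5_0.20210722.4]
    }
}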
HapiMigrator.java
@@ -24,12 +24,17 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.Validate;
import org.flywaydb.core.api.callback.Callback;
import org.flywaydb.core.api.callback.Event;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nonnull;
import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;

@@ -37,7 +42,7 @@ import java.util.Objects;
 * This class is an alternative to {@link FlywayMigrator}. It doesn't use Flyway, but instead just
 * executes all tasks.
 */
-public class HapiMigrator implements IMigrator {
+public class HapiMigrator {

    private static final Logger ourLog = LoggerFactory.getLogger(HapiMigrator.class);
    private List<BaseTask> myTasks = new ArrayList<>();
@@ -47,6 +52,7 @@ public class HapiMigrator implements IMigrator {
    private final DriverTypeEnum myDriverType;
    private final DataSource myDataSource;
    private final String myMigrationTableName;
+   private List<Callback> myCallbacks = Collections.emptyList();

    public HapiMigrator(DriverTypeEnum theDriverType, DataSource theDataSource, String theMigrationTableName) {
        myDriverType = theDriverType;
@@ -102,7 +108,6 @@ public class HapiMigrator implements IMigrator {
        return statementBuilder;
    }

-   @Override
    public void migrate() {
        DriverTypeEnum.ConnectionProperties connectionProperties = getDriverType().newConnectionProperties(getDataSource());

@@ -118,6 +123,8 @@
                } else {
                    ourLog.info("Executing {} {}", next.getMigrationVersion(), next.getDescription());
                }
+               // FIXME KHS replace with different callback
+               myCallbacks.forEach(action -> action.handle(Event.BEFORE_EACH_MIGRATE, null));
                next.execute();
                addExecutedStatements(next.getExecutedStatements());
            } catch (SQLException e) {
@@ -130,16 +137,24 @@
            }
        }

-   @Override
    public void addTasks(List<BaseTask> theMigrationTasks) {
        myTasks.addAll(theMigrationTasks);
    }

-   @Override
    public void addTask(BaseTask theTask) {
        myTasks.add(theTask);
    }

+   @Nonnull
+   public List<Callback> getCallbacks() {
+       return myCallbacks;
+   }
+
+   public void setCallbacks(@Nonnull List<Callback> theCallbacks) {
+       Validate.notNull(theCallbacks);
+       myCallbacks = theCallbacks;
+   }
+
    @VisibleForTesting
    public void removeAllTasksForUnitTest() {
        myTasks.clear();
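The migrate() loop above now fires Event.BEFORE_EACH_MIGRATE on every registered Flyway Callback via handle(event, null). A hedged sketch of what a caller could plug in through the new setCallbacks() hook: LoggingCallback is a made-up name, and only handle() matters to HapiMigrator, since it invokes handle() directly rather than consulting supports() first.

import org.flywaydb.core.api.callback.Callback;
import org.flywaydb.core.api.callback.Context;
import org.flywaydb.core.api.callback.Event;

public class LoggingCallback implements Callback {
    @Override
    public boolean supports(Event event, Context context) {
        // Not consulted by HapiMigrator's direct handle() call, but required by the interface
        return event == Event.BEFORE_EACH_MIGRATE;
    }

    @Override
    public boolean canHandleInTransaction(Event event, Context context) {
        return true;
    }

    @Override
    public void handle(Event event, Context context) {
        // HapiMigrator passes a null Context, so only the event is usable here
        System.out.println("About to execute a migration task: " + event.getId());
    }

    @Override
    public String getCallbackName() {
        return "LoggingCallback";
    }
}

It would be registered with migrator.setCallbacks(List.of(new LoggingCallback())) before calling migrate().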
IMigrator.java (deleted)
@@ -1,34 +0,0 @@
package ca.uhn.fhir.jpa.migrate;

/*-
 * #%L
 * HAPI FHIR Server - SQL Migration
 * %%
 * Copyright (C) 2014 - 2022 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;

import java.util.List;

public interface IMigrator {

    void migrate();

    void addTasks(List<BaseTask> theMigrationTasks);

    void addTask(BaseTask theTask);
}
SchemaMigrator.java
@@ -23,6 +23,8 @@ package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.flywaydb.core.api.MigrationVersion;
import org.flywaydb.core.api.callback.Callback;
import org.hibernate.cfg.AvailableSettings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -30,6 +32,7 @@ import org.slf4j.LoggerFactory;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
import java.util.Properties;

@@ -41,23 +44,21 @@ public class SchemaMigrator {
    private final boolean mySkipValidation;
    private final String myMigrationTableName;
    private final List<BaseTask> myMigrationTasks;
-   private boolean myStrictOrder;
    private DriverTypeEnum myDriverType;
    private List<Callback> myCallbacks = Collections.emptyList();
+   private final HapiMigrationStorageSvc myHapiMigrationStorageSvc;

    /**
     * Constructor
     */
-   public SchemaMigrator(String theSchemaName, String theMigrationTableName, DataSource theDataSource, Properties jpaProperties, List<BaseTask> theMigrationTasks) {
+   public SchemaMigrator(String theSchemaName, String theMigrationTableName, DataSource theDataSource, Properties jpaProperties, List<BaseTask> theMigrationTasks, HapiMigrationStorageSvc theHapiMigrationStorageSvc) {
        mySchemaName = theSchemaName;
        myDataSource = theDataSource;
        myMigrationTableName = theMigrationTableName;
        myMigrationTasks = theMigrationTasks;

        mySkipValidation = jpaProperties.containsKey(AvailableSettings.HBM2DDL_AUTO) && "update".equals(jpaProperties.getProperty(AvailableSettings.HBM2DDL_AUTO));
-   }
-
-   public void setStrictOrder(boolean theStrictOrder) {
-       myStrictOrder = theStrictOrder;
+       myHapiMigrationStorageSvc = theHapiMigrationStorageSvc;
    }

    public void validate() {
@@ -66,20 +67,31 @@ public class SchemaMigrator {
            return;
        }
        try (Connection connection = myDataSource.getConnection()) {
            // FIXME KHS
//          if (migrationInfo.isPresent()) {
//              if (migrationInfo.get().pending().length > 0) {
//
//                  String url = connection.getMetaData().getURL();
//                  throw new ConfigurationException(Msg.code(27) + "The database schema for " + url + " is out of date. " +
//                      "Current database schema version is " + getCurrentVersion(migrationInfo.get()) + ". Schema version required by application is " +
//                      getLastVersion(migrationInfo.get()) + ". Please run the database migrator.");
//              }
//              ourLog.info("Database schema confirmed at expected version " + getCurrentVersion(migrationInfo.get()));
//          }
            List<BaseTask> unappliedMigrations = myHapiMigrationStorageSvc.diff(myMigrationTasks);

            if (unappliedMigrations.size() > 0) {

                String url = connection.getMetaData().getURL();
                throw new ConfigurationException(Msg.code(27) + "The database schema for " + url + " is out of date. " +
                    "Current database schema version is " + myHapiMigrationStorageSvc.getLatestAppliedVersion() + ". Schema version required by application is " +
                    getLastVersion(myMigrationTasks) + ". Please run the database migrator.");
            }
            ourLog.info("Database schema confirmed at expected version " + myHapiMigrationStorageSvc.getLatestAppliedVersion());
        } catch (SQLException e) {
            throw new ConfigurationException(Msg.code(28) + "Unable to connect to " + myDataSource, e);
        }

    }

    private String getLastVersion(List<BaseTask> theMigrationTasks) {
        // FIXME make task list a first-order class
        return theMigrationTasks.stream()
            .map(BaseTask::getMigrationVersion)
            .map(MigrationVersion::fromVersion)
            .sorted()
            .map(MigrationVersion::toString)
            .reduce((first, second) -> second)
            .orElse(null);
    }

    public void migrate() {
@@ -101,10 +113,15 @@ public class SchemaMigrator {
        HapiMigrator migrator;
        migrator = new HapiMigrator(myDriverType, myDataSource, myMigrationTableName);
        migrator.addTasks(myMigrationTasks);
        migrator.setCallbacks(myCallbacks);
        return migrator;
    }

    public void setDriverType(DriverTypeEnum theDriverType) {
        myDriverType = theDriverType;
    }

    public void setCallbacks(List<Callback> theCallbacks) {
        myCallbacks = theCallbacks;
    }
}
HapiMigrationConfig.java (new file)
@@ -0,0 +1,14 @@
package ca.uhn.fhir.jpa.migrate.config;

import ca.uhn.fhir.jpa.migrate.HapiMigrationStorageSvc;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class HapiMigrationConfig {
    @Bean
    HapiMigrationStorageSvc myHapiMigrationStorageSvc(HapiMigrationDao theHapiMigrationDao) {
        return new HapiMigrationStorageSvc(theHapiMigrationDao);
    }
}
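A short wiring sketch, not from the commit: the @Bean above only supplies HapiMigrationStorageSvc, so it assumes a companion configuration class (the tests below use a TestMigrationConfig) contributes the HapiMigrationDao repository and its underlying DataSource.

package ca.uhn.fhir.jpa.migrate.config;

import ca.uhn.fhir.jpa.migrate.HapiMigrationStorageSvc;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;

public class MigrationWiringSketch {
    public static void main(String[] args) {
        // TestMigrationConfig is assumed to provide HapiMigrationDao, as in the tests below
        try (AnnotationConfigApplicationContext ctx =
                 new AnnotationConfigApplicationContext(TestMigrationConfig.class, HapiMigrationConfig.class)) {
            HapiMigrationStorageSvc svc = ctx.getBean(HapiMigrationStorageSvc.class);
            // With nothing recorded in the migration table yet, this falls back to "unknown"
            System.out.println(svc.getLatestAppliedVersion());
        }
    }
}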
HapiMigrationDao.java
@@ -2,8 +2,13 @@ package ca.uhn.fhir.jpa.migrate.dao;

import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;

+import java.util.List;
+
@Repository
public interface HapiMigrationDao extends JpaRepository<HapiMigrationEntity, Integer> {
+   @Query("SELECT s.myVersion FROM HapiMigrationEntity s")
+   List<String> fetchMigrationVersions();
}
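A hypothetical test, not in the commit, assuming the BaseMigrationTest wiring shown later in this diff. It exercises the projection query above: the @Query selects only s.myVersion, so bare version strings come back rather than full entities. The test class name is made up; the task arguments mirror the ones in HapiMigrationStorageSvcTest below.

package ca.uhn.fhir.jpa.migrate.dao;

import ca.uhn.fhir.jpa.migrate.BaseMigrationTest;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.jpa.migrate.taskdef.DropTableTask;
import org.junit.jupiter.api.Test;

import java.util.List;

import static org.junit.jupiter.api.Assertions.assertEquals;

class FetchMigrationVersionsSketchTest extends BaseMigrationTest {

    @Test
    void fetchMigrationVersions_afterSave_returnsBareVersionStrings() {
        // DropTableTask("V5_5_0", "20210722.4") yields migration version "5_5_0.20210722.4"
        HapiMigrationEntity entity = HapiMigrationEntity.fromBaseTask(new DropTableTask("V5_5_0", "20210722.4"));
        entity.setDescription("drop a test table"); // the task's own description may be unset here

        myHapiMigrationDao.save(entity);

        List<String> versions = myHapiMigrationDao.fetchMigrationVersions();
        assertEquals(List.of("5_5_0.20210722.4"), versions);
    }
}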
HapiMigrationEntity.java
@@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.migrate.entity;

+import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.springframework.data.domain.Persistable;

import javax.persistence.Column;
@@ -141,4 +142,13 @@ public class HapiMigrationEntity implements Persistable<Integer> {
    public void setSuccess(Boolean theSuccess) {
        mySuccess = theSuccess;
    }

+   public static HapiMigrationEntity fromBaseTask(BaseTask theTask) {
+       HapiMigrationEntity retval = new HapiMigrationEntity();
+       retval.setVersion(theTask.getMigrationVersion());
+       retval.setDescription(theTask.getDescription());
+       retval.setChecksum(theTask.hashCode());
+       retval.setType("JDBC");
+       return retval;
+   }
}
@@ -26,6 +26,7 @@ import com.google.common.collect.Multimap;
import com.google.common.collect.MultimapBuilder;
import org.apache.commons.lang3.EnumUtils;
import org.apache.commons.lang3.Validate;
+import org.flywaydb.core.api.MigrationVersion;

import javax.annotation.Nonnull;
import java.util.ArrayList;
MigrationVersion.java (deleted)
@@ -1,12 +0,0 @@
package ca.uhn.fhir.jpa.migrate.tasks.api;

// FIXME KHS replace with flyway core library
public class MigrationVersion {
    public static MigrationVersion fromVersion(String theVersion) {
        return null;
    }

    public int compareTo(MigrationVersion theLastVersion) {
        return 0;
    }
}
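This stub is deleted because Flyway's real MigrationVersion (pulled in via the new flyway-core dependency) provides the parsing and ordering the stub only gestured at. A small sketch, not from the commit, of the semantics the migrator now relies on; the version strings mirror the ones in the tests below.

import org.flywaydb.core.api.MigrationVersion;

public class MigrationVersionSketch {
    public static void main(String[] args) {
        // Flyway treats underscores as dots, so these parse as 5.5.0.20210720.3 etc.
        MigrationVersion older = MigrationVersion.fromVersion("5_5_0.20210720.3");
        MigrationVersion newer = MigrationVersion.fromVersion("5_5_0.20210722.4");

        System.out.println(older.compareTo(newer) < 0); // true: numeric, part-by-part ordering
        System.out.println(older.equals(MigrationVersion.fromVersion("5.5.0.20210720.3"))); // true
    }
}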
BaseMigrationTest.java (new file)
@@ -0,0 +1,24 @@
package ca.uhn.fhir.jpa.migrate;

import ca.uhn.fhir.jpa.migrate.config.HapiMigrationConfig;
import ca.uhn.fhir.jpa.migrate.config.TestMigrationConfig;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;

@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {TestMigrationConfig.class, HapiMigrationConfig.class})
public class BaseMigrationTest {
    @Autowired
    protected HapiMigrationDao myHapiMigrationDao;

    @AfterEach
    void after() {
        myHapiMigrationDao.deleteAll();
    }

}
HapiMigrationStorageSvcTest.java (new file)
@@ -0,0 +1,83 @@
package ca.uhn.fhir.jpa.migrate;

import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ColumnTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.DropTableTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.ArrayList;
import java.util.List;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.jupiter.api.Assertions.assertEquals;

class HapiMigrationStorageSvcTest extends BaseMigrationTest {
    private static final String RELEASE = "V5_5_0";
    @Autowired
    HapiMigrationStorageSvc myHapiMigrationStorageSvc;

    @BeforeEach
    void before() {
        List<BaseTask> tasks = createTasks();
        assertThat(tasks, hasSize(7));

        for (BaseTask task : tasks) {
            HapiMigrationEntity entity = HapiMigrationEntity.fromBaseTask(task);
            myHapiMigrationDao.save(entity);
        }
    }

    @Test
    void diff_oneNew_returnsNew() {
        List<HapiMigrationEntity> appliedMigrations = myHapiMigrationStorageSvc.fetchAppliedMigrations();
        assertThat(appliedMigrations, hasSize(7));

        List<BaseTask> tasks = createTasks();
        BaseTask dropTableTask = new DropTableTask(RELEASE, "20210722.4");
        tasks.add(dropTableTask);

        List<BaseTask> notAppliedYet = myHapiMigrationStorageSvc.diff(tasks);
        assertThat(notAppliedYet, hasSize(1));
        assertEquals("5_5_0.20210722.4", notAppliedYet.get(0).getMigrationVersion());
    }

    private List<BaseTask> createTasks() {
        List<BaseTask> tasks = new ArrayList<>();

        Builder version = forVersion(tasks);

        Builder.BuilderAddTableByColumns cmpToks = version
            .addTableByColumns("20210720.3", "HFJ_IDX_CMB_TOK_NU", "PID");
        cmpToks.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG);
        cmpToks.addColumn("RES_ID").nonNullable().type(ColumnTypeEnum.LONG);
        cmpToks.addColumn("HASH_COMPLETE").nonNullable().type(ColumnTypeEnum.LONG);
        cmpToks.addColumn("IDX_STRING").nonNullable().type(ColumnTypeEnum.STRING, 500);
        cmpToks.addForeignKey("20210720.4", "FK_IDXCMBTOKNU_RES_ID").toColumn("RES_ID").references("HFJ_RESOURCE", "RES_ID");
        cmpToks.addIndex("20210720.5", "IDX_IDXCMBTOKNU_STR").unique(false).withColumns("IDX_STRING");
        cmpToks.addIndex("20210720.6", "IDX_IDXCMBTOKNU_RES").unique(false).withColumns("RES_ID");

        Builder.BuilderWithTableName cmbTokNuTable = version.onTable("HFJ_IDX_CMB_TOK_NU");

        cmbTokNuTable.addColumn("20210722.1", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT);
        cmbTokNuTable.addColumn("20210722.2", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY);
        cmbTokNuTable.modifyColumn("20210722.3", "RES_ID").nullable().withType(ColumnTypeEnum.LONG);

        return tasks;
    }

    public Builder forVersion(List<BaseTask> theTasks) {
        BaseMigrationTasks.IAcceptsTasks sink = theTask -> {
            theTask.validate();
            theTasks.add(theTask);
        };
        return new Builder(RELEASE, sink);
    }

}
SchemaMigratorTest.java
@@ -5,6 +5,7 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.taskdef.AddTableRawSqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTest;
import ca.uhn.fhir.jpa.migrate.taskdef.DropTableTask;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.hamcrest.Matchers;
@@ -13,6 +14,8 @@ import org.junit.jupiter.params.provider.MethodSource;

import javax.annotation.Nonnull;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.function.Supplier;
@@ -23,6 +26,8 @@ import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.endsWith;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.when;

public class SchemaMigratorTest extends BaseTest {
    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SchemaMigratorTest.class);
@@ -35,6 +40,11 @@ public class SchemaMigratorTest extends BaseTest {

        SchemaMigrator schemaMigrator = createTableMigrator();

+       List<BaseTask> list = new ArrayList<>();
+       list.add(new DropTableTask("1", "1.1"));
+       when(myHapiMigrationStorageSvc.diff(any())).thenReturn(list);
+       when(myHapiMigrationStorageSvc.getLatestAppliedVersion()).thenReturn("unknown");

        try {
            schemaMigrator.validate();
            fail();
@@ -76,7 +86,6 @@ public class SchemaMigratorTest extends BaseTest {
        AddTableRawSqlTask task1 = createAddTableTask("SOMEOTHERTABLE", "create table SOMEOTHERTABLE (PID bigint not null, TEXTCOL varchar(255))", "1");
        AddTableRawSqlTask task2 = createAddTableTask("SOMETABLE", "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))", "2");
        schemaMigrator = createSchemaMigrator(task1, task2);
-       schemaMigrator.setStrictOrder(true);

        try {
            schemaMigrator.migrate();
@@ -84,7 +93,6 @@
        } catch (HapiMigrationException e) {
            assertThat(e.getMessage(), containsString("Detected resolved migration not applied to database: 1.1"));
        }
-       schemaMigrator.setStrictOrder(false);
        schemaMigrator.migrate();
    }

@@ -111,7 +119,7 @@

        ImmutableList<BaseTask> taskList = ImmutableList.of(taskA, taskB, taskC, taskD);
        MigrationTaskSkipper.setDoNothingOnSkippedTasks(taskList, "4_1_0.20191214.2, 4_1_0.20191214.4");
-       SchemaMigrator schemaMigrator = new SchemaMigrator(getUrl(), SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), taskList);
+       SchemaMigrator schemaMigrator = new SchemaMigrator(getUrl(), SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), taskList, myHapiMigrationStorageSvc);
        schemaMigrator.setDriverType(getDriverType());

        schemaMigrator.migrate();
@@ -155,7 +163,7 @@

    @Nonnull
    private SchemaMigrator createSchemaMigrator(BaseTask... tasks) {
-       SchemaMigrator retVal = new SchemaMigrator(getUrl(), SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), Lists.newArrayList(tasks));
+       SchemaMigrator retVal = new SchemaMigrator(getUrl(), SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), Lists.newArrayList(tasks), myHapiMigrationStorageSvc);
        retVal.setDriverType(getDriverType());
        return retVal;
    }
HapiMigrationDaoTest.java
@@ -1,13 +1,8 @@
package ca.uhn.fhir.jpa.migrate.dao;

-import ca.uhn.fhir.jpa.migrate.config.TestMigrationConfig;
+import ca.uhn.fhir.jpa.migrate.BaseMigrationTest;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;

import java.util.List;

@@ -15,16 +10,7 @@ import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.jupiter.api.Assertions.assertEquals;

-@ExtendWith(SpringExtension.class)
-@ContextConfiguration(classes = {TestMigrationConfig.class})
-class HapiMigrationDaoTest {
-    @Autowired
-    HapiMigrationDao myHapiMigrationDao;
-
-    @AfterEach
-    void after() {
-        myHapiMigrationDao.deleteAll();
-    }
+class HapiMigrationDaoTest extends BaseMigrationTest {

    @Test
    public void findAll_empty_returnsNothing() {
BaseTest.java (taskdef)
@@ -1,12 +1,16 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
+import ca.uhn.fhir.jpa.migrate.HapiMigrationStorageSvc;
import ca.uhn.fhir.jpa.migrate.HapiMigrator;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import org.apache.commons.dbcp2.BasicDataSource;
import org.intellij.lang.annotations.Language;
import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.ColumnMapRowMapper;
@@ -20,6 +24,7 @@ import java.util.function.Supplier;
import java.util.stream.Stream;


+@ExtendWith(MockitoExtension.class)
public abstract class BaseTest {

    private static final String DATABASE_NAME = "DATABASE";
@@ -29,6 +34,8 @@ public abstract class BaseTest {
    private String myUrl;
    private HapiMigrator myMigrator;
    private DriverTypeEnum.ConnectionProperties myConnectionProperties;
+   @Mock
+   protected HapiMigrationStorageSvc myHapiMigrationStorageSvc;

    public static Stream<Supplier<TestDatabaseDetails>> data() {
        ourLog.info("H2: {}", org.h2.Driver.class.toString());
pom.xml
@@ -813,6 +813,7 @@
    <jsr305_version>3.0.2</jsr305_version>
    <junit_version>5.8.2</junit_version>
    <flexmark_version>0.50.40</flexmark_version>
+   <flyway_version>8.5.0</flyway_version>
    <hibernate_version>5.6.2.Final</hibernate_version>
    <hibernate_search_version>6.1.6.Final</hibernate_search_version>
    <!-- Update lucene version when you update hibernate-search version -->
@@ -1957,6 +1958,11 @@
        <artifactId>xpp3_xpath</artifactId>
        <version>1.1.4c</version>
    </dependency>
+   <dependency>
+       <groupId>org.flywaydb</groupId>
+       <artifactId>flyway-core</artifactId>
+       <version>${flyway_version}</version>
+   </dependency>
    <dependency>
        <groupId>org.springframework.batch</groupId>
        <artifactId>spring-batch-test</artifactId>