Replace Spring Data JPA with the SQLBuilder library for reading and writing the migration history table

This commit is contained in:
Ken Stevens 2022-09-17 14:21:43 -04:00
parent 2f02546d94
commit afcb6e68c9
15 changed files with 310 additions and 186 deletions

View File

@ -1,6 +1,7 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.HapiMigrator;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import org.apache.commons.dbcp2.BasicDataSource;

View File

@ -71,34 +71,21 @@
<groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId>
</dependency>
<!-- SQL Builder -->
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-jpa</artifactId>
<groupId>com.healthmarketscience.sqlbuilder</groupId>
<artifactId>sqlbuilder</artifactId>
</dependency>
<!-- test -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<scope>test</scope>
</dependency>
<!--
These have been added as explicit dependencies
as JDK9 no longer includes them by default
-->
<dependency>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
<version>${jaxb_api_version}</version>
<scope>test</scope>
</dependency>
</dependencies>
</dependencies>
<build>

View File

@ -1,7 +1,6 @@
package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.flywaydb.core.api.MigrationVersion;
@ -16,10 +15,6 @@ public class HapiMigrationStorageSvc {
myHapiMigrationDao = theHapiMigrationDao;
}
public List<HapiMigrationEntity> fetchAppliedMigrations() {
return myHapiMigrationDao.findAll();
}
public List<BaseTask> diff(List<BaseTask> theTasks) {
Set<MigrationVersion> appliedMigrationVersions = fetchAppliedMigrationVersions();
@ -28,8 +23,8 @@ public class HapiMigrationStorageSvc {
.collect(Collectors.toList());
}
private Set<MigrationVersion> fetchAppliedMigrationVersions() {
return myHapiMigrationDao.fetchMigrationVersions().stream().map(MigrationVersion::fromVersion).collect(Collectors.toSet());
Set<MigrationVersion> fetchAppliedMigrationVersions() {
return myHapiMigrationDao.fetchMigrationVersions();
}
// FIXME KHS: add test coverage for this class

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.migrate;
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import com.google.common.annotations.VisibleForTesting;
@ -52,6 +53,7 @@ public class HapiMigrator {
private final DriverTypeEnum myDriverType;
private final DataSource myDataSource;
private final String myMigrationTableName;
private final HapiMigrationStorageSvc myHapiMigrationStorageSvc;
private List<Callback> myCallbacks = Collections.emptyList();
public HapiMigrator(DriverTypeEnum theDriverType, DataSource theDataSource, String theMigrationTableName) {
@ -59,6 +61,7 @@ public class HapiMigrator {
myDataSource = theDataSource;
// FIXME KHS use tablename
myMigrationTableName = theMigrationTableName;
myHapiMigrationStorageSvc = new HapiMigrationStorageSvc(new HapiMigrationDao(theDataSource, theMigrationTableName));
}
public DataSource getDataSource() {
@ -123,9 +126,12 @@ public class HapiMigrator {
} else {
ourLog.info("Executing {} {}", next.getMigrationVersion(), next.getDescription());
}
// FIXME KHS replace with different callback
// FIXME KHS replace with different callback probably a BaseTask consumer
myCallbacks.forEach(action -> action.handle(Event.BEFORE_EACH_MIGRATE, null));
// FIXME KHS break up
// preExecute(next);
next.execute();
// postExecute(next);
addExecutedStatements(next.getExecutedStatements());
} catch (SQLException e) {
throw new InternalErrorException(Msg.code(48) + e);

View File

@ -1,14 +0,0 @@
package ca.uhn.fhir.jpa.migrate.config;
import ca.uhn.fhir.jpa.migrate.HapiMigrationStorageSvc;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
// Spring @Configuration that wires the migration storage service on top of an
// externally-provided HapiMigrationDao bean.
@Configuration
public class HapiMigrationConfig {
// Bean name is the method name ("myHapiMigrationStorageSvc"); the DAO is injected
// by type from the surrounding application context.
@Bean
HapiMigrationStorageSvc myHapiMigrationStorageSvc(HapiMigrationDao theHapiMigrationDao) {
return new HapiMigrationStorageSvc(theHapiMigrationDao);
}
}

View File

@ -1,14 +1,83 @@
package ca.uhn.fhir.jpa.migrate.dao;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;
import ca.uhn.fhir.util.VersionEnum;
import org.apache.commons.lang3.Validate;
import org.flywaydb.core.api.MigrationVersion;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.BadSqlGrammarException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import javax.sql.DataSource;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
@Repository
public interface HapiMigrationDao extends JpaRepository<HapiMigrationEntity, Integer> {
@Query("SELECT s.myVersion FROM HapiMigrationEntity s")
List<String> fetchMigrationVersions();
/**
 * Plain-JDBC DAO for the HAPI FHIR migration history table.
 *
 * <p>Replaces the former Spring Data JPA repository: all SQL is produced by a
 * {@link MigrationQueryBuilder} bound to the configured table name and executed
 * through a {@link JdbcTemplate}.
 */
public class HapiMigrationDao {
	private static final Logger ourLog = LoggerFactory.getLogger(HapiMigrationDao.class);

	private final JdbcTemplate myJdbcTemplate;
	private final String myMigrationTablename;
	private final MigrationQueryBuilder myMigrationQueryBuilder;
	private final DataSource myDataSource;

	public HapiMigrationDao(DataSource theDataSource, String theMigrationTablename) {
		myDataSource = theDataSource;
		myJdbcTemplate = new JdbcTemplate(theDataSource);
		myMigrationTablename = theMigrationTablename;
		myMigrationQueryBuilder = new MigrationQueryBuilder(theMigrationTablename);
	}

	/**
	 * @return the set of migration versions already recorded in the migration table
	 */
	public Set<MigrationVersion> fetchMigrationVersions() {
		String query = myMigrationQueryBuilder.findVersionQuery();
		List<String> result = myJdbcTemplate.queryForList(query, String.class);
		return result.stream()
			.map(MigrationVersion::fromVersion)
			.collect(Collectors.toSet());
	}

	/** Deletes every row from the migration table. */
	public void deleteAll() {
		myJdbcTemplate.execute(myMigrationQueryBuilder.deleteAll());
	}

	/**
	 * Inserts a new migration record, assigning it the next available key and
	 * stamping the fixed TYPE/SCRIPT/INSTALLED_BY/INSTALLED_ON bookkeeping columns.
	 *
	 * @param theEntity the record to persist; description, execution time and success must be set
	 * @return the same entity, with its pid populated
	 */
	public HapiMigrationEntity save(HapiMigrationEntity theEntity) {
		Validate.notNull(theEntity.getDescription(), "Description may not be null");
		Validate.notNull(theEntity.getExecutionTime(), "Execution time may not be null");
		Validate.notNull(theEntity.getSuccess(), "Success may not be null");

		// NOTE(review): read-then-insert key assignment assumes a single writer;
		// confirm two migrators never run concurrently against the same table
		Integer highestKey = getHighestKey();
		if (highestKey == null || highestKey < 0) {
			highestKey = 0;
		}
		Integer nextAvailableKey = highestKey + 1;
		theEntity.setPid(nextAvailableKey);
		theEntity.setType("JDBC");
		theEntity.setScript("HAPI FHIR");
		theEntity.setInstalledBy(VersionEnum.latestVersion().name());
		theEntity.setInstalledOn(new Date());
		String insertRecordStatement = myMigrationQueryBuilder.insertStatement(theEntity);
		int changedRowCount = myJdbcTemplate.update(insertRecordStatement);
		// Fail loudly rather than silently dropping the history record
		Validate.isTrue(changedRowCount == 1, "Expected to insert 1 migration record but inserted %d", changedRowCount);
		return theEntity;
	}

	/**
	 * @return the highest INSTALLED_RANK currently in the table, or null when the table is empty
	 */
	private Integer getHighestKey() {
		String highestKeyQuery = myMigrationQueryBuilder.getHighestKeyQuery();
		return myJdbcTemplate.queryForObject(highestKeyQuery, Integer.class);
	}

	/**
	 * Creates the migration table if it does not already exist.  Existence is probed by
	 * attempting a select; a grammar exception is taken to mean the table is missing.
	 */
	// FIXME KHS use this in migration
	public void createMigrationTableIfRequired() {
		try {
			fetchMigrationVersions();
		} catch (BadSqlGrammarException e) {
			ourLog.info("Creating table {}", myMigrationTablename);
			myJdbcTemplate.execute(myMigrationQueryBuilder.createTableStatement());
		}
	}
}

View File

@ -0,0 +1,113 @@
package ca.uhn.fhir.jpa.migrate.dao;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import com.healthmarketscience.sqlbuilder.CreateTableQuery;
import com.healthmarketscience.sqlbuilder.DeleteQuery;
import com.healthmarketscience.sqlbuilder.FunctionCall;
import com.healthmarketscience.sqlbuilder.InsertQuery;
import com.healthmarketscience.sqlbuilder.JdbcEscape;
import com.healthmarketscience.sqlbuilder.SelectQuery;
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn;
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbSchema;
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbSpec;
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbTable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Types;
/**
 * Builds all SQL used by {@link HapiMigrationDao} against the migration history table.
 *
 * <p>The column layout mirrors the Flyway schema-history table (INSTALLED_RANK, VERSION,
 * DESCRIPTION, TYPE, SCRIPT, CHECKSUM, INSTALLED_BY, INSTALLED_ON, EXECUTION_TIME, SUCCESS).
 * Static queries are pre-built once in the constructor; only the insert statement is
 * built per call because it embeds entity values.
 */
public class MigrationQueryBuilder {
	private static final Logger ourLog = LoggerFactory.getLogger(MigrationQueryBuilder.class);

	private final DbSpec mySpec;
	private final DbSchema mySchema;
	private final DbTable myTable;
	private final DbColumn myVersionCol;
	private final DbColumn myInstalledRankCol;
	private final DbColumn myDescriptionCol;
	private final DbColumn myTypeCol;
	private final DbColumn myScriptCol;
	private final DbColumn myChecksumCol;
	private final DbColumn myInstalledByCol;
	private final DbColumn myInstalledOnCol;
	private final DbColumn myExecutionTimeCol;
	private final DbColumn mySuccessCol;
	private final String myFindVersionQuery;
	private final String myDeleteAll;
	private final String myHighestKeyQuery;

	public MigrationQueryBuilder(String theMigrationTablename) {
		mySpec = new DbSpec();
		mySchema = mySpec.addDefaultSchema();
		myTable = mySchema.addTable(theMigrationTablename);

		myInstalledRankCol = myTable.addColumn("INSTALLED_RANK", Types.INTEGER, null);
		myInstalledRankCol.notNull();

		// VERSION and CHECKSUM are intentionally nullable
		myVersionCol = myTable.addColumn("VERSION", Types.VARCHAR, HapiMigrationEntity.VERSION_MAX_SIZE);

		myDescriptionCol = myTable.addColumn("DESCRIPTION", Types.VARCHAR, HapiMigrationEntity.DESCRIPTION_MAX_SIZE);
		myDescriptionCol.notNull();

		myTypeCol = myTable.addColumn("TYPE", Types.VARCHAR, HapiMigrationEntity.TYPE_MAX_SIZE);
		myTypeCol.notNull();

		myScriptCol = myTable.addColumn("SCRIPT", Types.VARCHAR, HapiMigrationEntity.SCRIPT_MAX_SIZE);
		myScriptCol.notNull();

		myChecksumCol = myTable.addColumn("CHECKSUM", Types.INTEGER, null);

		myInstalledByCol = myTable.addColumn("INSTALLED_BY", Types.VARCHAR, HapiMigrationEntity.INSTALLED_BY_MAX_SIZE);
		myInstalledByCol.notNull();

		// TIMESTAMP, not TIME: insertStatement() writes a full date+time via
		// JdbcEscape.timestamp(), and a TIME column would drop the date portion
		myInstalledOnCol = myTable.addColumn("INSTALLED_ON", Types.TIMESTAMP, null);
		myInstalledOnCol.notNull();

		myExecutionTimeCol = myTable.addColumn("EXECUTION_TIME", Types.INTEGER, null);
		myExecutionTimeCol.notNull();

		mySuccessCol = myTable.addColumn("SUCCESS", Types.BOOLEAN, null);
		mySuccessCol.notNull();

		myFindVersionQuery = new SelectQuery().addColumns(myVersionCol).validate().toString();
		myDeleteAll = new DeleteQuery(myTable).toString();
		myHighestKeyQuery = buildHighestKeyQuery();
	}

	/** @return SELECT of all VERSION values in the table */
	public String findVersionQuery() {
		return myFindVersionQuery;
	}

	/** @return DELETE of every row in the table */
	public String deleteAll() {
		return myDeleteAll;
	}

	/** @return SELECT of the maximum INSTALLED_RANK */
	public String getHighestKeyQuery() {
		return myHighestKeyQuery;
	}

	private String buildHighestKeyQuery() {
		return new SelectQuery()
			.addCustomColumns(FunctionCall.max().addColumnParams(myInstalledRankCol))
			.validate()
			.toString();
	}

	/**
	 * Builds an INSERT for the given record.  Built per call (not cached) because the
	 * statement embeds the entity's values directly.
	 *
	 * @param theEntity the fully-populated record to insert
	 * @return the INSERT statement as SQL text
	 */
	public String insertStatement(HapiMigrationEntity theEntity) {
		String retval = new InsertQuery(myTable)
			.addColumn(myInstalledRankCol, theEntity.getPid())
			.addColumn(myVersionCol, theEntity.getVersion())
			.addColumn(myDescriptionCol, theEntity.getDescription())
			.addColumn(myTypeCol, theEntity.getType())
			.addColumn(myScriptCol, theEntity.getScript())
			.addColumn(myChecksumCol, theEntity.getChecksum())
			.addColumn(myInstalledByCol, theEntity.getInstalledBy())
			.addColumn(myInstalledOnCol, JdbcEscape.timestamp(theEntity.getInstalledOn()))
			.addColumn(myExecutionTimeCol, theEntity.getExecutionTime())
			.addColumn(mySuccessCol, theEntity.getSuccess())
			.validate()
			.toString();
		// Debug, not info: this fires on every migration record written
		ourLog.debug(retval);
		return retval;
	}

	/** @return CREATE TABLE IF NOT EXISTS statement for the migration table */
	public String createTableStatement() {
		return new CreateTableQuery(myTable, true)
			.validate()
			.toString();
	}
}

View File

@ -1,7 +1,7 @@
package ca.uhn.fhir.jpa.migrate.entity;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.springframework.data.domain.Persistable;
import org.springframework.jdbc.core.RowMapper;
import javax.persistence.Column;
import javax.persistence.Entity;
@ -9,17 +9,17 @@ import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import java.util.Date;
// Note even though we are using javax.persistence annotations here, we are managing these records outside of jpa
// so these annotations are for informational purposes only
@Entity
@Table(name = "FLY_HFJ_MIGRATION")
public class HapiMigrationEntity implements Persistable<Integer> {
private static final int VERSION_MAX_SIZE = 50;
private static final int DESCRIPTION_MAX_SIZE = 200;
private static final int TYPE_MAX_SIZE = 20;
private static final int SCRIPT_MAX_SIZE = 1000;
private static final int INSTALLED_BY_MAX_SIZE = 100;
public class HapiMigrationEntity {
public static final int VERSION_MAX_SIZE = 50;
public static final int DESCRIPTION_MAX_SIZE = 200;
public static final int TYPE_MAX_SIZE = 20;
public static final int SCRIPT_MAX_SIZE = 1000;
public static final int INSTALLED_BY_MAX_SIZE = 100;
@Id
@SequenceGenerator(name = "SEQ_FLY_HFJ_MIGRATION", sequenceName = "SEQ_FLY_HFJ_MIGRATION")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_FLY_HFJ_MIGRATION")
@ -53,16 +53,6 @@ public class HapiMigrationEntity implements Persistable<Integer> {
@Column(name = "SUCCESS")
private Boolean mySuccess;
@Override
public Integer getId() {
return myPid;
}
@Override
public boolean isNew() {
return null == getId();
}
public Integer getPid() {
return myPid;
}
@ -151,4 +141,21 @@ public class HapiMigrationEntity implements Persistable<Integer> {
retval.setType("JDBC");
return retval;
}
/**
 * Builds a {@link RowMapper} that materializes a HapiMigrationEntity from one row of the
 * migration table, reading columns positionally in the table's declared order:
 * INSTALLED_RANK, VERSION, DESCRIPTION, TYPE, SCRIPT, CHECKSUM, INSTALLED_BY,
 * INSTALLED_ON, EXECUTION_TIME, SUCCESS.
 */
public static RowMapper<HapiMigrationEntity> newRowMapper() {
	return (rs, rowNum) -> {
		HapiMigrationEntity entity = new HapiMigrationEntity();
		entity.setPid(rs.getInt(1));
		entity.setVersion(rs.getString(2));
		entity.setDescription(rs.getString(3));
		entity.setType(rs.getString(4));
		entity.setScript(rs.getString(5));
		// CHECKSUM is nullable; rs.getInt() would silently coerce SQL NULL to 0,
		// so preserve null explicitly via wasNull()
		Integer checksum = rs.getInt(6);
		if (rs.wasNull()) {
			checksum = null;
		}
		entity.setChecksum(checksum);
		entity.setInstalledBy(rs.getString(7));
		entity.setInstalledOn(rs.getTimestamp(8));
		entity.setExecutionTime(rs.getInt(9));
		entity.setSuccess(rs.getBoolean(10));
		return entity;
	};
}
}

View File

@ -1,24 +1,40 @@
package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.jpa.migrate.config.HapiMigrationConfig;
import ca.uhn.fhir.jpa.migrate.config.TestMigrationConfig;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import org.apache.commons.dbcp2.BasicDataSource;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.junit.jupiter.api.BeforeAll;
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {TestMigrationConfig.class, HapiMigrationConfig.class})
public class BaseMigrationTest {
@Autowired
protected
HapiMigrationDao myHapiMigrationDao;
import javax.sql.DataSource;
public abstract class BaseMigrationTest {
private static final String TABLE_NAME = "TEST_MIGRATION_TABLE";
private static final String TEST_DBUSER = "TEST_DBUSER";
protected static HapiMigrationDao ourHapiMigrationDao;
protected static HapiMigrationStorageSvc ourHapiMigrationStorageSvc;
// One-time fixture setup shared by all subclass tests: a DAO over the in-memory
// H2 datasource, its backing table, and the storage service wrapping the DAO.
@BeforeAll
public static void beforeAll() {
ourHapiMigrationDao = new HapiMigrationDao(getDataSource(), TABLE_NAME);
// Probes the table with a select and creates it if the select fails (fresh database)
ourHapiMigrationDao.createMigrationTableIfRequired();
ourHapiMigrationStorageSvc = new HapiMigrationStorageSvc(ourHapiMigrationDao);
}
/** Builds a small in-memory H2 connection pool for the migration tests. */
private static DataSource getDataSource() {
	BasicDataSource dataSource = new BasicDataSource();
	dataSource.setDriver(new org.h2.Driver());
	dataSource.setUrl("jdbc:h2:mem:test_migration");
	dataSource.setUsername("");
	dataSource.setPassword("");
	dataSource.setMaxTotal(5);
	dataSource.setMaxWaitMillis(30000);
	return dataSource;
}
@AfterEach
void after() {
myHapiMigrationDao.deleteAll();
ourHapiMigrationDao.deleteAll();
}
}

View File

@ -6,12 +6,13 @@ import ca.uhn.fhir.jpa.migrate.taskdef.ColumnTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.DropTableTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
import org.flywaydb.core.api.MigrationVersion;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasSize;
@ -19,8 +20,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
class HapiMigrationStorageSvcTest extends BaseMigrationTest {
private static final String RELEASE = "V5_5_0";
@Autowired
HapiMigrationStorageSvc myHapiMigrationStorageSvc;
@BeforeEach
void before() {
@ -29,20 +28,22 @@ class HapiMigrationStorageSvcTest extends BaseMigrationTest {
for (BaseTask task : tasks) {
HapiMigrationEntity entity = HapiMigrationEntity.fromBaseTask(task);
myHapiMigrationDao.save(entity);
entity.setExecutionTime(1);
entity.setSuccess(true);
ourHapiMigrationDao.save(entity);
}
}
@Test
void diff_oneNew_returnsNew() {
List<HapiMigrationEntity> appliedMigrations = myHapiMigrationStorageSvc.fetchAppliedMigrations();
Set<MigrationVersion> appliedMigrations = ourHapiMigrationStorageSvc.fetchAppliedMigrationVersions();
assertThat(appliedMigrations, hasSize(7));
List<BaseTask> tasks = createTasks();
BaseTask dropTableTask = new DropTableTask(RELEASE, "20210722.4");
tasks.add(dropTableTask);
List<BaseTask> notAppliedYet = myHapiMigrationStorageSvc.diff(tasks);
List<BaseTask> notAppliedYet = ourHapiMigrationStorageSvc.diff(tasks);
assertThat(notAppliedYet, hasSize(1));
assertEquals("5_5_0.20210722.4", notAppliedYet.get(0).getMigrationVersion());
}

View File

@ -5,7 +5,6 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.taskdef.AddTableRawSqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTest;
import ca.uhn.fhir.jpa.migrate.taskdef.DropTableTask;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.hamcrest.Matchers;
@ -14,8 +13,6 @@ import org.junit.jupiter.params.provider.MethodSource;
import javax.annotation.Nonnull;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.function.Supplier;
@ -26,8 +23,6 @@ import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.endsWith;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
public class SchemaMigratorTest extends BaseTest {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SchemaMigratorTest.class);
@ -40,11 +35,6 @@ public class SchemaMigratorTest extends BaseTest {
SchemaMigrator schemaMigrator = createTableMigrator();
List<BaseTask> list = new ArrayList<>();
list.add(new DropTableTask("1", "1.1"));
when(myHapiMigrationStorageSvc.diff(any())).thenReturn(list);
when(myHapiMigrationStorageSvc.getLatestAppliedVersion()).thenReturn("unknown");
try {
schemaMigrator.validate();
fail();
@ -119,7 +109,7 @@ public class SchemaMigratorTest extends BaseTest {
ImmutableList<BaseTask> taskList = ImmutableList.of(taskA, taskB, taskC, taskD);
MigrationTaskSkipper.setDoNothingOnSkippedTasks(taskList, "4_1_0.20191214.2, 4_1_0.20191214.4");
SchemaMigrator schemaMigrator = new SchemaMigrator(getUrl(), SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), taskList, myHapiMigrationStorageSvc);
SchemaMigrator schemaMigrator = new SchemaMigrator(getUrl(), SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), taskList, ourHapiMigrationStorageSvc);
schemaMigrator.setDriverType(getDriverType());
schemaMigrator.migrate();
@ -163,7 +153,7 @@ public class SchemaMigratorTest extends BaseTest {
@Nonnull
private SchemaMigrator createSchemaMigrator(BaseTask... tasks) {
SchemaMigrator retVal = new SchemaMigrator(getUrl(), SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), Lists.newArrayList(tasks), myHapiMigrationStorageSvc);
SchemaMigrator retVal = new SchemaMigrator(getUrl(), SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), Lists.newArrayList(tasks), ourHapiMigrationStorageSvc);
retVal.setDriverType(getDriverType());
return retVal;
}

View File

@ -1,67 +0,0 @@
package ca.uhn.fhir.jpa.migrate.config;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.sql.DataSource;
import java.util.Properties;
@EnableJpaRepositories(basePackages = {"ca.uhn.fhir.jpa.migrate.dao"})
@EnableTransactionManagement
public class TestMigrationConfig {
private static final Logger ourLog = LoggerFactory.getLogger(TestMigrationConfig.class);
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory(DataSource theDataSource) {
LocalContainerEntityManagerFactoryBean retVal = new LocalContainerEntityManagerFactoryBean();
retVal.setPackagesToScan("ca.uhn.fhir.jpa.migrate.entity");
retVal.setPersistenceProvider(new HibernatePersistenceProvider());
retVal.setPersistenceUnitName("PU_Migration");
retVal.setDataSource(theDataSource);
retVal.setJpaProperties(jpaProperties());
return retVal;
}
@Bean
public DataSource dataSource() {
BasicDataSource retVal = new BasicDataSource();
retVal.setDriver(new org.h2.Driver());
retVal.setUrl("jdbc:h2:mem:test_migration");
retVal.setMaxWaitMillis(30000);
retVal.setUsername("");
retVal.setPassword("");
retVal.setMaxTotal(5);
return retVal;
}
private Properties jpaProperties() {
Properties extraProperties = new Properties();
extraProperties.put("hibernate.search.enabled", "false");
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
ourLog.info("jpaProperties: {}", extraProperties);
return extraProperties;
}
@Bean
public PlatformTransactionManager transactionManager(LocalContainerEntityManagerFactoryBean theEntityManagerFactory) {
JpaTransactionManager retVal = new JpaTransactionManager();
retVal.setEntityManagerFactory(theEntityManagerFactory.getObject());
return retVal;
}
}

View File

@ -2,9 +2,10 @@ package ca.uhn.fhir.jpa.migrate.dao;
import ca.uhn.fhir.jpa.migrate.BaseMigrationTest;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import org.flywaydb.core.api.MigrationVersion;
import org.junit.jupiter.api.Test;
import java.util.List;
import java.util.Set;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasSize;
@ -14,25 +15,36 @@ class HapiMigrationDaoTest extends BaseMigrationTest {
@Test
public void findAll_empty_returnsNothing() {
List<HapiMigrationEntity> result = myHapiMigrationDao.findAll();
Set<MigrationVersion> result = ourHapiMigrationDao.fetchMigrationVersions();
assertThat(result, hasSize(0));
}
@Test
public void findAll_2records_returnsBoth() {
HapiMigrationEntity record1 = new HapiMigrationEntity();
String desc1 = "DESC1";
record1.setDescription(desc1);
HapiMigrationEntity result1 = myHapiMigrationDao.save(record1);
assertEquals(1, result1.getId());
HapiMigrationEntity record1 = buildEntity("DESC1", "1.1");
HapiMigrationEntity record2 = new HapiMigrationEntity();
String desc2 = "DESC2";
record2.setDescription(desc2);
HapiMigrationEntity result2 = myHapiMigrationDao.save(record2);
assertEquals(2, result2.getId());
HapiMigrationEntity result1 = ourHapiMigrationDao.save(record1);
assertEquals(1, result1.getPid());
{
Set<MigrationVersion> all = ourHapiMigrationDao.fetchMigrationVersions();
assertThat(all, hasSize(1));
}
HapiMigrationEntity record2 = buildEntity("DESC2", "1.2");
List<HapiMigrationEntity> all = myHapiMigrationDao.findAll();
assertThat(all, hasSize(2));
HapiMigrationEntity result2 = ourHapiMigrationDao.save(record2);
assertEquals(2, result2.getPid());
{
Set<MigrationVersion> all = ourHapiMigrationDao.fetchMigrationVersions();
assertThat(all, hasSize(2));
}
}
/** Assembles a minimal saveable migration record with the given description and version. */
private HapiMigrationEntity buildEntity(String theDesc, String theVersion) {
	HapiMigrationEntity entity = new HapiMigrationEntity();
	entity.setDescription(theDesc);
	entity.setVersion(theVersion);
	entity.setSuccess(true);
	entity.setExecutionTime(1);
	return entity;
}
}

View File

@ -1,16 +1,13 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.BaseMigrationTest;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.HapiMigrationStorageSvc;
import ca.uhn.fhir.jpa.migrate.HapiMigrator;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import org.apache.commons.dbcp2.BasicDataSource;
import org.intellij.lang.annotations.Language;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.ColumnMapRowMapper;
@ -24,8 +21,7 @@ import java.util.function.Supplier;
import java.util.stream.Stream;
@ExtendWith(MockitoExtension.class)
public abstract class BaseTest {
public abstract class BaseTest extends BaseMigrationTest {
private static final String DATABASE_NAME = "DATABASE";
private static final Logger ourLog = LoggerFactory.getLogger(BaseTest.class);
@ -34,8 +30,6 @@ public abstract class BaseTest {
private String myUrl;
private HapiMigrator myMigrator;
private DriverTypeEnum.ConnectionProperties myConnectionProperties;
@Mock
protected HapiMigrationStorageSvc myHapiMigrationStorageSvc;
public static Stream<Supplier<TestDatabaseDetails>> data() {
ourLog.info("H2: {}", org.h2.Driver.class.toString());

14
pom.xml
View File

@ -833,6 +833,7 @@
<ph_schematron_version>5.6.5</ph_schematron_version>
<ph_commons_version>9.5.4</ph_commons_version>
<plexus_compiler_api_version>2.9.0</plexus_compiler_api_version>
<querydsl.version>4.4.0</querydsl.version>
<servicemix_saxon_version>9.8.0-15</servicemix_saxon_version>
<servicemix_xmlresolver_version>1.2_5</servicemix_xmlresolver_version>
<swagger_version>2.1.12</swagger_version>
@ -1004,6 +1005,19 @@
<artifactId>ph-commons</artifactId>
<version>${ph_commons_version}</version>
</dependency>
<!-- Query DSL -->
<dependency>
<groupId>com.querydsl</groupId>
<artifactId>querydsl-core</artifactId>
<version>${querydsl.version}</version>
</dependency>
<dependency>
<groupId>com.querydsl</groupId>
<artifactId>querydsl-jpa</artifactId>
<version>${querydsl.version}</version>
</dependency>
<dependency>
<groupId>com.jamesmurty.utils</groupId>
<artifactId>java-xmlbuilder</artifactId>