Add result column to migration table (#6580)
* add skipped flag
* switch to string
* changelog
* changelog
* migrate table
* migrate table
* spotless
* merge master
* exception code
* exception code
* fix regression
This commit is contained in:
parent
b51ccdc696
commit
f2f069f486
@@ -0,0 +1,7 @@
---
type: add
issue: 6580
title: "A new `RESULT` column has been added to the database migration table to record the migration execution result.
  Possible values are `NOT_APPLIED_SKIPPED` (skipped via the `skip-versions` flag, or the migration task was stubbed),
  `NOT_APPLIED_NOT_FOR_THIS_DATABASE` (does not apply to that database), `NOT_APPLIED_PRECONDITION_NOT_MET` (not run because a SQL precondition was not met),
  `NOT_APPLIED_ALLOWED_FAILURE` (the migration failed, but the task is permitted to fail), and `APPLIED` (the migration was applied)."
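For reference, once this change has run, the recorded result of each migration can be read back with plain JDBC. The following is a minimal sketch rather than part of this commit; it assumes the default HAPI FHIR migration table name (FLY_HFJ_MIGRATION) and the quoted lower-case "version" and "result" column names that MigrationQueryBuilder creates.

import org.springframework.jdbc.core.JdbcTemplate;

import javax.sql.DataSource;
import java.util.List;

public class MigrationResultReader {
    // Lists each recorded migration version together with its execution result
    public static List<String> readResults(DataSource theDataSource) {
        JdbcTemplate jdbcTemplate = new JdbcTemplate(theDataSource);
        return jdbcTemplate.query(
                "SELECT \"version\", \"result\" FROM FLY_HFJ_MIGRATION",
                (rs, rowNum) -> rs.getString(1) + " -> " + rs.getString(2));
    }
}

Rows written before this change will simply report a null result.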
@@ -80,6 +80,9 @@ public class HapiMigrationStorageSvc {
        HapiMigrationEntity entity = HapiMigrationEntity.fromBaseTask(theBaseTask);
        entity.setExecutionTime(theMillis);
        entity.setSuccess(theSuccess);
        if (theBaseTask.getExecutionResult() != null) {
            entity.setResult(theBaseTask.getExecutionResult().name());
        }
        myHapiMigrationDao.save(entity);
    }
@@ -209,6 +209,7 @@ public class HapiMigrator {
            theTask.execute();
            recordTaskAsCompletedIfNotDryRun(theTask, sw.getMillis(), true);
            theMigrationResult.changes += theTask.getChangesCount();
            theMigrationResult.executionResult = theTask.getExecutionResult();
            theMigrationResult.executedStatements.addAll(theTask.getExecutedStatements());
            theMigrationResult.succeededTasks.add(theTask);
        } catch (SQLException | HapiMigrationException e) {
@@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.migrate;

import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.MigrationTaskExecutionResultEnum;

import java.util.ArrayList;
import java.util.List;
@@ -29,6 +30,7 @@ public class MigrationResult {
    public final List<BaseTask.ExecutedStatement> executedStatements = new ArrayList<>();
    public final List<BaseTask> succeededTasks = new ArrayList<>();
    public final List<BaseTask> failedTasks = new ArrayList<>();
    public MigrationTaskExecutionResultEnum executionResult;

    public String summary() {
        return String.format(
@@ -106,6 +106,11 @@ public class HapiMigrationDao {

    public boolean createMigrationTableIfRequired() {
        if (migrationTableExists()) {
            if (!columnExists("result")) {
                String addResultColumnStatement = myMigrationQueryBuilder.addResultColumnStatement();
                ourLog.info(addResultColumnStatement);
                myJdbcTemplate.execute(addResultColumnStatement);
            }
            return false;
        }
        ourLog.info("Creating table {}", myMigrationTablename);
@@ -144,6 +149,29 @@ public class HapiMigrationDao {
        }
    }

    private boolean columnExists(String theColumnName) {
        try (Connection connection = myDataSource.getConnection()) {
            ResultSet columnsUpper = connection
                    .getMetaData()
                    .getColumns(
                            connection.getCatalog(),
                            connection.getSchema(),
                            myMigrationTablename,
                            theColumnName.toUpperCase());
            ResultSet columnsLower = connection
                    .getMetaData()
                    .getColumns(
                            connection.getCatalog(),
                            connection.getSchema(),
                            myMigrationTablename,
                            theColumnName.toLowerCase());

            return columnsUpper.next() || columnsLower.next(); // If there's a row, the column exists
        } catch (SQLException e) {
            throw new InternalErrorException(Msg.code(2615) + "Error checking column existence: " + e.getMessage(), e);
        }
    }
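The paired upper/lower-case lookups exist because JDBC metadata lookups are case-sensitive and databases fold unquoted identifiers differently (H2 and Oracle to upper case, PostgreSQL to lower case). A case-insensitive alternative is sketched below; it is a hypothetical variant, not part of this commit, reusing the same myDataSource and myMigrationTablename fields, and note that the table name itself must still match the stored case:

private boolean columnExistsIgnoreCase(String theColumnName) {
    try (Connection connection = myDataSource.getConnection();
            // A null column pattern asks the driver for every column of the table
            ResultSet columns = connection
                    .getMetaData()
                    .getColumns(connection.getCatalog(), connection.getSchema(), myMigrationTablename, null)) {
        while (columns.next()) {
            if (theColumnName.equalsIgnoreCase(columns.getString("COLUMN_NAME"))) {
                return true;
            }
        }
        return false;
    } catch (SQLException e) {
        throw new RuntimeException("Error checking column existence: " + e.getMessage(), e);
    }
}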

    public List<HapiMigrationEntity> findAll() {
        String allQuery = myMigrationQueryBuilder.findAllQuery();
        ourLog.debug("Executing query: [{}]", allQuery);
@@ -23,6 +23,7 @@ import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.jpa.migrate.taskdef.ColumnTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.ColumnTypeToDriverTypeToSqlType;
import com.healthmarketscience.sqlbuilder.AlterTableQuery;
import com.healthmarketscience.sqlbuilder.BinaryCondition;
import com.healthmarketscience.sqlbuilder.CreateIndexQuery;
import com.healthmarketscience.sqlbuilder.CreateTableQuery;
@@ -55,6 +56,7 @@ public class MigrationQueryBuilder {
    private final DbColumn myInstalledOnCol;
    private final DbColumn myExecutionTimeCol;
    private final DbColumn mySuccessCol;
    private final DbColumn myResultCol;
    private final String myDeleteAll;
    private final String myHighestKeyQuery;
    private final DriverTypeEnum myDriverType;
@@ -102,6 +104,8 @@ public class MigrationQueryBuilder {
        mySuccessCol = myTable.addColumn("\"success\"", myBooleanType, null);
        mySuccessCol.notNull();

        myResultCol = myTable.addColumn("\"result\"", Types.VARCHAR, HapiMigrationEntity.RESULT_MAX_SIZE);

        myDeleteAll = new DeleteQuery(myTable).toString();
        myHighestKeyQuery = buildHighestKeyQuery();
    }
@@ -133,7 +137,8 @@
                myInstalledByCol,
                myInstalledOnCol,
                myExecutionTimeCol,
-               mySuccessCol)
+               mySuccessCol,
+               myResultCol)
            .validate()
            .toString();
    }
@@ -142,6 +147,10 @@
        return new CreateTableQuery(myTable, true).validate().toString();
    }

    public String addResultColumnStatement() {
        return new AlterTableQuery(myTable).setAddColumn(myResultCol).validate().toString();
    }
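With the column definition above, the statement this method returns should look roughly like ALTER TABLE "FLY_HFJ_MIGRATION" ADD COLUMN "result" VARCHAR(100). The exact rendering (quoting, ADD vs. ADD COLUMN, the configured table name) depends on the sqlbuilder version and the deployment, so treat that as an approximation rather than guaranteed output.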

    public String createIndexStatement() {
        return new CreateIndexQuery(myTable, myMigrationTablename.toUpperCase() + "_PK_INDEX")
                .setIndexType(CreateIndexQuery.IndexType.UNIQUE)
@@ -41,6 +41,7 @@ public class HapiMigrationEntity {
    public static final int TYPE_MAX_SIZE = 20;
    public static final int SCRIPT_MAX_SIZE = 1000;
    public static final int INSTALLED_BY_MAX_SIZE = 100;
    public static final int RESULT_MAX_SIZE = 100;
    public static final int CREATE_TABLE_PID = -1;
    public static final String INITIAL_RECORD_DESCRIPTION = "<< HAPI FHIR Schema History table created >>";
    public static final String INITIAL_RECORD_SCRIPT = "HAPI FHIR";
@@ -80,6 +81,9 @@ public class HapiMigrationEntity {
    @Column(name = "SUCCESS")
    private Boolean mySuccess;

    @Column(name = "RESULT", length = RESULT_MAX_SIZE)
    private String myResult;

    public static HapiMigrationEntity tableCreatedRecord() {
        HapiMigrationEntity retVal = new HapiMigrationEntity();
        retVal.setPid(CREATE_TABLE_PID);
@@ -195,6 +199,7 @@ public class HapiMigrationEntity {
            entity.setInstalledOn(rs.getDate(8));
            entity.setExecutionTime(rs.getInt(9));
            entity.setSuccess(rs.getBoolean(10));
            entity.setResult(rs.getString(11));
            return entity;
        };
    }
@@ -219,6 +224,15 @@ public class HapiMigrationEntity {
                    : null);
            ps.setInt(9, getExecutionTime());
            ps.setBoolean(10, getSuccess());
            ps.setString(11, getResult());
        };
    }

    public String getResult() {
        return myResult;
    }

    public void setResult(String theResult) {
        myResult = theResult;
    }
}
@@ -74,6 +74,7 @@ public abstract class BaseTask {
    private DriverTypeEnum myDriverType;
    private String myDescription;
    private Integer myChangesCount = 0;
    private MigrationTaskExecutionResultEnum myExecutionResult;
    private boolean myDryRun;
    private boolean myTransactional = true;
    private Set<DriverTypeEnum> myOnlyAppliesToPlatforms = new HashSet<>();
@@ -210,6 +211,7 @@ public abstract class BaseTask {
            } else {
                int changesCount = jdbcTemplate.update(theSql, theArguments);
                logInfo(ourLog, "SQL \"{}\" returned {}", theSql, changesCount);
                myExecutionResult = MigrationTaskExecutionResultEnum.APPLIED;
                return changesCount;
            }
        } catch (DataAccessException e) {
@@ -218,6 +220,7 @@
                    "Task {} did not exit successfully on doExecuteSql(), but task is allowed to fail",
                    getMigrationVersion());
            ourLog.debug("Error was: {}", e.getMessage(), e);
            myExecutionResult = MigrationTaskExecutionResultEnum.NOT_APPLIED_ALLOWED_FAILURE;
            return 0;
        } else {
            throw new HapiMigrationException(
@@ -262,11 +265,13 @@
    public void execute() throws SQLException {
        if (myFlags.contains(TaskFlagEnum.DO_NOTHING)) {
            ourLog.info("Skipping stubbed task: {}", getDescription());
            myExecutionResult = MigrationTaskExecutionResultEnum.NOT_APPLIED_SKIPPED;
            return;
        }
        if (!myOnlyAppliesToPlatforms.isEmpty()) {
            if (!myOnlyAppliesToPlatforms.contains(getDriverType())) {
                ourLog.info("Skipping task {} as it does not apply to {}", getDescription(), getDriverType());
                myExecutionResult = MigrationTaskExecutionResultEnum.NOT_APPLIED_NOT_FOR_THIS_DATABASE;
                return;
            }
        }
@@ -277,6 +282,7 @@
                ourLog.info(
                        "Skipping task since one of the preconditions was not met: {}",
                        precondition.getPreconditionReason());
                myExecutionResult = MigrationTaskExecutionResultEnum.NOT_APPLIED_PRECONDITION_NOT_MET;
                return;
            }
        }
@@ -356,6 +362,10 @@
        return myFlags.contains(theFlag);
    }

    public MigrationTaskExecutionResultEnum getExecutionResult() {
        return myExecutionResult;
    }

    public static class ExecutedStatement {
        private final String mySql;
        private final List<Object> myArguments;
@@ -0,0 +1,28 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

public enum MigrationTaskExecutionResultEnum {
    /**
     * Was either skipped via the `skip-versions` flag, or the migration task was stubbed
     */
    NOT_APPLIED_SKIPPED,

    /**
     * This migration task does not apply to this database
     */
    NOT_APPLIED_NOT_FOR_THIS_DATABASE,

    /**
     * This migration task had precondition criteria (expressed as SQL) that were not met
     */
    NOT_APPLIED_PRECONDITION_NOT_MET,

    /**
     * The migration failed, but the task has the FAILURE_ALLOWED flag set
     */
    NOT_APPLIED_ALLOWED_FAILURE,

    /**
     * The migration was applied
     */
    APPLIED,
}
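To illustrate how a consumer might act on these values (the storage service above persists them via name()), here is a hypothetical helper, not part of this commit, that maps each result to a log-friendly description:

import ca.uhn.fhir.jpa.migrate.taskdef.MigrationTaskExecutionResultEnum;

public class MigrationResultSummarizer {
    // Produces a one-line human-readable summary for a recorded execution result
    public static String summarize(MigrationTaskExecutionResultEnum theResult) {
        switch (theResult) {
            case NOT_APPLIED_SKIPPED:
                return "skipped (skip-versions flag or stubbed task)";
            case NOT_APPLIED_NOT_FOR_THIS_DATABASE:
                return "not applicable to this database platform";
            case NOT_APPLIED_PRECONDITION_NOT_MET:
                return "precondition not met";
            case NOT_APPLIED_ALLOWED_FAILURE:
                return "failed, but failure is allowed";
            case APPLIED:
                return "applied successfully";
            default:
                return "unknown result: " + theResult;
        }
    }
}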
@@ -17,7 +17,7 @@ public abstract class BaseMigrationTest {
        ourHapiMigrationStorageSvc = new HapiMigrationStorageSvc(ourHapiMigrationDao);
    }

-   static BasicDataSource getDataSource() {
+   public static BasicDataSource getDataSource() {
        BasicDataSource retVal = new BasicDataSource();
        retVal.setDriver(new org.h2.Driver());
        retVal.setUrl("jdbc:h2:mem:test_migration");
@@ -6,6 +6,7 @@ import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.jpa.migrate.taskdef.AddTableRawSqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTest;
import ca.uhn.fhir.jpa.migrate.taskdef.MigrationTaskExecutionResultEnum;
import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
@@ -25,6 +26,8 @@ import java.util.function.Supplier;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
@@ -164,6 +167,14 @@ public class SchemaMigratorTest extends BaseTest {
        DriverTypeEnum.ConnectionProperties connectionProperties = super.getDriverType().newConnectionProperties(getDataSource().getUrl(), getDataSource().getUsername(), getDataSource().getPassword());
        Set<String> tableNames = JdbcUtils.getTableNames(connectionProperties);
        assertThat(tableNames).containsExactlyInAnyOrder("SOMETABLE_A", "SOMETABLE_C");

        List<HapiMigrationEntity> entities = myHapiMigrationDao.findAll();

        assertThat(entities).hasSize(4);
        assertThat(entities.get(0).getResult()).isEqualTo(MigrationTaskExecutionResultEnum.APPLIED.name());
        assertThat(entities.get(1).getResult()).isEqualTo(MigrationTaskExecutionResultEnum.NOT_APPLIED_SKIPPED.name());
        assertThat(entities.get(2).getResult()).isEqualTo(MigrationTaskExecutionResultEnum.APPLIED.name());
        assertThat(entities.get(3).getResult()).isEqualTo(MigrationTaskExecutionResultEnum.NOT_APPLIED_SKIPPED.name());
    }

    @Nonnull
@@ -0,0 +1,75 @@
package ca.uhn.fhir.jpa.migrate.dao;

import ca.uhn.fhir.jpa.migrate.BaseMigrationTest;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.HapiMigrationStorageSvc;
import org.apache.commons.dbcp2.BasicDataSource;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;

import java.util.List;

import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class AlterMigrationTableIT {
    private static final Logger ourLog = LoggerFactory.getLogger(AlterMigrationTableIT.class);

    static final String TABLE_NAME = "TEST_MIGRATION_TABLE";
    protected static HapiMigrationDao ourHapiMigrationDao;
    static JdbcTemplate ourJdbcTemplate;
    protected static HapiMigrationStorageSvc ourHapiMigrationStorageSvc;

    @BeforeAll
    public static void beforeAll() {
        BasicDataSource dataSource = BaseMigrationTest.getDataSource();
        ourHapiMigrationDao = new HapiMigrationDao(dataSource, DriverTypeEnum.H2_EMBEDDED, TABLE_NAME);
        ourJdbcTemplate = new JdbcTemplate(dataSource);
        createOldTable();
    }

    private static void createOldTable() {
        String oldSchema = """
            CREATE TABLE "TEST_MIGRATION_TABLE" (
            "installed_rank" INTEGER NOT NULL,
            "version" VARCHAR(50),
            "description" VARCHAR(200) NOT NULL,
            "type" VARCHAR(20) NOT NULL,
            "script" VARCHAR(1000) NOT NULL,
            "checksum" INTEGER,
            "installed_by" VARCHAR(100) NOT NULL,
            "installed_on" DATE NOT NULL,
            "execution_time" INTEGER NOT NULL,
            "success" boolean NOT NULL)
            """;
        ourJdbcTemplate.execute(oldSchema);
    }

    @Test
    void testNewColumnAdded() {
        assertFalse(doesColumnExist("TEST_MIGRATION_TABLE", "RESULT"));
        ourHapiMigrationDao.createMigrationTableIfRequired();
        assertTrue(doesColumnExist("TEST_MIGRATION_TABLE", "RESULT"));
    }

    private boolean doesColumnExist(String theTableName, String theColumnName) {
        String sql = """
            SELECT COLUMN_NAME
            FROM INFORMATION_SCHEMA.COLUMNS
            WHERE TABLE_NAME = ?
            """;

        List<String> columns = ourJdbcTemplate.query(
            sql,
            new Object[] { theTableName.toUpperCase() },
            (rs, rowNum) -> rs.getString("COLUMN_NAME")
        );

        ourLog.info("Columns in table '{}': {}", theTableName, columns);

        return columns.stream()
            .map(String::toUpperCase)
            .anyMatch(columnName -> columnName.equals(theColumnName.toUpperCase()));
    }
}
@@ -3,12 +3,14 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.HapiMigrationException;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
import jakarta.annotation.Nonnull;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

import java.sql.SQLException;
import java.util.List;
import java.util.function.Supplier;

import static org.assertj.core.api.Assertions.assertThat;
@@ -279,6 +281,9 @@ public class ModifyColumnTest extends BaseTest {
        getMigrator().migrate();
        assertEquals(ColumnTypeEnum.STRING, JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL").getColumnTypeEnum());

        List<HapiMigrationEntity> entities = myHapiMigrationDao.findAll();
        assertThat(entities).hasSize(1);
        assertThat(entities.get(0).getResult()).isEqualTo(MigrationTaskExecutionResultEnum.NOT_APPLIED_ALLOWED_FAILURE.name());
    }

    @ParameterizedTest(name = "{index}: {0}")