Flyway integration test passes
parent 94c8f96856
commit c8c89d976f
@@ -162,6 +162,7 @@ public abstract class BaseApp {
 		commands.add(new ExportConceptMapToCsvCommand());
 		commands.add(new ImportCsvToConceptMapCommand());
 		commands.add(new HapiMigrateDatabaseCommand());
+		commands.add(new HapiFlywayMigrateDatabaseCommand());
 		return commands;
 	}
@@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.migrate.FlywayMigrator;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
+import org.apache.commons.lang3.StringUtils;
 
 import java.util.Arrays;
 import java.util.Collections;
@@ -32,7 +33,9 @@ import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;
 
-public abstract class FlywayMigrateDatabaseCommand<T extends Enum> extends BaseCommand {
+import static org.apache.commons.lang3.StringUtils.defaultString;
+
+public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends BaseCommand {
 
 	private static final String FLYWAY_MIGRATE_DATABASE = "flyway-migrate-database";
 	private Set<String> myFlags;
@@ -98,6 +101,12 @@ public abstract class FlywayMigrateDatabaseCommand<T extends Enum> extends BaseCommand {
 		boolean dryRun = theCommandLine.hasOption("r");
 		boolean noColumnShrink = theCommandLine.hasOption("no-column-shrink");
 
+		String flags = theCommandLine.getOptionValue("x");
+		myFlags = Arrays.stream(defaultString(flags).split(","))
+			.map(String::trim)
+			.filter(StringUtils::isNotBlank)
+			.collect(Collectors.toSet());
+
 		FlywayMigrator migrator = new FlywayMigrator();
 		migrator.setConnectionUrl(url);
 		migrator.setDriverType(driverType);
@@ -105,8 +114,10 @@ public abstract class FlywayMigrateDatabaseCommand<T extends Enum> extends BaseCommand {
 		migrator.setPassword(password);
 		migrator.setDryRun(dryRun);
 		migrator.setNoColumnShrink(noColumnShrink);
 
+		addTasks(migrator);
 		migrator.migrate();
 	}
 
+	protected abstract void addTasks(FlywayMigrator theMigrator);
 
 }
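Note: the "-x" handling added above reduces a comma-separated option value to a trimmed set with blanks dropped. A minimal, JDK-only sketch of the same pipeline (Objects.toString stands in for commons-lang3's defaultString; the flag values are illustrative):

import java.util.Arrays;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

public class FlagParsingDemo {
	public static void main(String[] args) {
		// e.g. the raw value passed via -x
		String flags = " no-migrate-350-hashes, ,some-other-flag ";
		Set<String> parsed = Arrays.stream(Objects.toString(flags, "").split(","))
			.map(String::trim)
			.filter(s -> !s.isEmpty()) // stands in for StringUtils::isNotBlank
			.collect(Collectors.toSet());
		// prints the two non-blank flags; a null or empty -x yields an empty set
		System.out.println(parsed);
	}
}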
@@ -0,0 +1,48 @@
+package ca.uhn.fhir.cli;
+
+/*-
+ * #%L
+ * HAPI FHIR - Command Line Client - API
+ * %%
+ * Copyright (C) 2014 - 2019 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.jpa.migrate.FlywayMigrator;
+import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
+import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
+import ca.uhn.fhir.util.VersionEnum;
+
+import java.util.Arrays;
+import java.util.List;
+
+public class HapiFlywayMigrateDatabaseCommand extends BaseFlywayMigrateDatabaseCommand<VersionEnum> {
+
+	@Override
+	protected List<VersionEnum> provideAllowedVersions() {
+		return Arrays.asList(VersionEnum.values());
+	}
+
+	@Override
+	protected Class<VersionEnum> provideVersionEnumType() {
+		return VersionEnum.class;
+	}
+
+	@Override
+	protected void addTasks(FlywayMigrator theMigrator) {
+		List<BaseTask<?>> tasks = new HapiFhirJpaMigrationTasks(getFlags()).getAllTasks(VersionEnum.values());
+		tasks.forEach(theMigrator::addTask);
+	}
+}
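This subclass is what wires the CLI verb to the HAPI task list; the integration test below drives it through App.main. A usage sketch mirroring the test's arguments (the H2 URL is an example value):

String[] args = {
	"flyway-migrate-database",
	"-d", "H2_EMBEDDED",
	"-u", "jdbc:h2:./target/h2_test/db;create=true",
	"-n", "",
	"-p", ""
};
App.main(args);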
@@ -0,0 +1,228 @@
+package ca.uhn.fhir.cli;
+
+import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
+import ca.uhn.fhir.util.VersionEnum;
+import com.google.common.base.Charsets;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.core.support.AbstractLobCreatingPreparedStatementCallback;
+import org.springframework.jdbc.support.lob.DefaultLobHandler;
+import org.springframework.jdbc.support.lob.LobCreator;
+
+import javax.validation.constraints.NotNull;
+import java.io.File;
+import java.io.IOException;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+import static org.apache.commons.lang3.StringUtils.isBlank;
+import static org.junit.Assert.*;
+
+public class HapiFlywayMigrateDatabaseCommandTest {
+
+	private static final Logger ourLog = LoggerFactory.getLogger(HapiFlywayMigrateDatabaseCommandTest.class);
+	public static final String DB_DIRECTORY = "target/h2_test";
+
+	static {
+		System.setProperty("test", "true");
+	}
+
+	@Test
+	public void testMigrate() throws IOException {
+
+		File location = getLocation("migrator_h2_test_340_current");
+
+		String url = "jdbc:h2:" + location.getAbsolutePath() + ";create=true";
+		DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
+
+		String initSql = "/persistence_create_h2_340.sql";
+		executeSqlStatements(connectionProperties, initSql);
+
+		seedDatabase340(connectionProperties);
+
+		ourLog.info("**********************************************");
+		ourLog.info("Done Setup, Starting Migration...");
+		ourLog.info("**********************************************");
+
+		String[] args = new String[]{
+			"flyway-migrate-database",
+			"-d", "H2_EMBEDDED",
+			"-u", url,
+			"-n", "",
+			"-p", ""
+		};
+		App.main(args);
+
+		connectionProperties.getTxTemplate().execute(t -> {
+			JdbcTemplate jdbcTemplate = connectionProperties.newJdbcTemplate();
+			List<Map<String, Object>> values = jdbcTemplate.queryForList("SELECT * FROM hfj_spidx_token");
+			assertEquals(1, values.size());
+			assertEquals("identifier", values.get(0).get("SP_NAME"));
+			assertEquals("12345678", values.get(0).get("SP_VALUE"));
+			assertTrue(values.get(0).keySet().contains("HASH_IDENTITY"));
+			assertEquals(7001889285610424179L, values.get(0).get("HASH_IDENTITY"));
+			return null;
+		});
+	}
+
+	@NotNull
+	private File getLocation(String theDatabaseName) throws IOException {
+		File directory = new File(DB_DIRECTORY);
+		if (directory.exists()) {
+			FileUtils.deleteDirectory(directory);
+		}
+
+		return new File(DB_DIRECTORY + "/" + theDatabaseName);
+	}
+
+	private void seedDatabase340(DriverTypeEnum.ConnectionProperties theConnectionProperties) {
+		theConnectionProperties.getTxTemplate().execute(t -> {
+			JdbcTemplate jdbcTemplate = theConnectionProperties.newJdbcTemplate();
+
+			jdbcTemplate.execute(
+				"insert into HFJ_RESOURCE (RES_DELETED_AT, RES_VERSION, FORCED_ID_PID, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, SP_HAS_LINKS, HASH_SHA256, SP_INDEX_STATUS, RES_LANGUAGE, SP_CMPSTR_UNIQ_PRESENT, SP_COORDS_PRESENT, SP_DATE_PRESENT, SP_NUMBER_PRESENT, SP_QUANTITY_PRESENT, SP_STRING_PRESENT, SP_TOKEN_PRESENT, SP_URI_PRESENT, RES_PROFILE, RES_TYPE, RES_VER, RES_ID) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
+				new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
+					@Override
+					protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
+						thePs.setNull(1, Types.TIMESTAMP);
+						thePs.setString(2, "R4");
+						thePs.setNull(3, Types.BIGINT);
+						thePs.setBoolean(4, false);
+						thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
+						thePs.setTimestamp(6, new Timestamp(System.currentTimeMillis()));
+						thePs.setBoolean(7, false);
+						thePs.setNull(8, Types.VARCHAR);
+						thePs.setLong(9, 1L);
+						thePs.setNull(10, Types.VARCHAR);
+						thePs.setBoolean(11, false);
+						thePs.setBoolean(12, false);
+						thePs.setBoolean(13, false);
+						thePs.setBoolean(14, false);
+						thePs.setBoolean(15, false);
+						thePs.setBoolean(16, false);
+						thePs.setBoolean(17, false);
+						thePs.setBoolean(18, false);
+						thePs.setNull(19, Types.VARCHAR);
+						thePs.setString(20, "Patient");
+						thePs.setLong(21, 1L);
+						thePs.setLong(22, 1L);
+					}
+				}
+			);
+
+			jdbcTemplate.execute(
+				"insert into HFJ_RES_VER (RES_DELETED_AT, RES_VERSION, FORCED_ID_PID, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, RES_ENCODING, RES_TEXT, RES_ID, RES_TYPE, RES_VER, PID) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
+				new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
+					@Override
+					protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
+						thePs.setNull(1, Types.TIMESTAMP);
+						thePs.setString(2, "R4");
+						thePs.setNull(3, Types.BIGINT);
+						thePs.setBoolean(4, false);
+						thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
+						thePs.setTimestamp(6, new Timestamp(System.currentTimeMillis()));
+						thePs.setString(7, "JSON");
+						theLobCreator.setBlobAsBytes(thePs, 8, "{\"resourceType\":\"Patient\"}".getBytes(Charsets.US_ASCII));
+						thePs.setLong(9, 1L);
+						thePs.setString(10, "Patient");
+						thePs.setLong(11, 1L);
+						thePs.setLong(12, 1L);
+					}
+				}
+			);
+
+			jdbcTemplate.execute(
+				"insert into HFJ_SPIDX_STRING (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_VALUE_EXACT, SP_VALUE_NORMALIZED, SP_ID) values (?, ?, ?, ?, ?, ?, ?, ?)",
+				new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
+					@Override
+					protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
+						thePs.setBoolean(1, false);
+						thePs.setString(2, "given");
+						thePs.setLong(3, 1L); // res-id
+						thePs.setString(4, "Patient");
+						thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
+						thePs.setString(6, "ROBERT");
+						thePs.setString(7, "Robert");
+						thePs.setLong(8, 1L);
+					}
+				}
+			);
+
+			jdbcTemplate.execute(
+				"insert into HFJ_SPIDX_TOKEN (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_SYSTEM, SP_VALUE, SP_ID) values (?, ?, ?, ?, ?, ?, ?, ?)",
+				new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
+					@Override
+					protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
+						thePs.setBoolean(1, false);
+						thePs.setString(2, "identifier");
+						thePs.setLong(3, 1L); // res-id
+						thePs.setString(4, "Patient");
+						thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
+						thePs.setString(6, "http://foo");
+						thePs.setString(7, "12345678");
+						thePs.setLong(8, 1L);
+					}
+				}
+			);
+
+			jdbcTemplate.execute(
+				"insert into HFJ_SPIDX_DATE (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_VALUE_HIGH, SP_VALUE_LOW, SP_ID) values (?, ?, ?, ?, ?, ?, ?, ?)",
+				new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
+					@Override
+					protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
+						thePs.setBoolean(1, false);
+						thePs.setString(2, "birthdate");
+						thePs.setLong(3, 1L); // res-id
+						thePs.setString(4, "Patient");
+						thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
+						thePs.setTimestamp(6, new Timestamp(1000000000L)); // value high
+						thePs.setTimestamp(7, new Timestamp(1000000000L)); // value low
+						thePs.setLong(8, 1L);
+					}
+				}
+			);
+
+			return null;
+		});
+
+	}
+
+	private void executeSqlStatements(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theInitSql) throws IOException {
+		String script = IOUtils.toString(HapiFlywayMigrateDatabaseCommandTest.class.getResourceAsStream(theInitSql), Charsets.UTF_8);
+		List<String> scriptStatements = new ArrayList<>(Arrays.asList(script.split("\n")));
+		for (int i = 0; i < scriptStatements.size(); i++) {
+			String nextStatement = scriptStatements.get(i);
+			if (isBlank(nextStatement)) {
+				scriptStatements.remove(i);
+				i--;
+				continue;
+			}
+
+			nextStatement = nextStatement.trim();
+			while (nextStatement.endsWith(";")) {
+				nextStatement = nextStatement.substring(0, nextStatement.length() - 1);
+			}
+			scriptStatements.set(i, nextStatement);
+		}
+
+		theConnectionProperties.getTxTemplate().execute(t -> {
+			for (String next : scriptStatements) {
+				theConnectionProperties.newJdbcTemplate().execute(next);
+			}
+			return null;
+		});
+
+	}
+
+}
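The test exercises the full CLI path. For a sense of what the command does underneath, here is a sketch that drives FlywayMigrator directly; setUsername is an assumed setter (only setConnectionUrl, setDriverType, setPassword, setDryRun, setNoColumnShrink, addTask and migrate appear in this diff), and the URL is an example value:

import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.FlywayMigrator;
import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;

import java.util.Collections;

public class DirectMigrationSketch {
	public static void main(String[] args) {
		FlywayMigrator migrator = new FlywayMigrator();
		migrator.setConnectionUrl("jdbc:h2:./target/h2_test/demo"); // example URL
		migrator.setDriverType(DriverTypeEnum.H2_EMBEDDED);
		migrator.setUsername(""); // assumed setter, mirroring setPassword in the diff
		migrator.setPassword("");
		// Same task wiring as HapiFlywayMigrateDatabaseCommand.addTasks(...), with no flags set
		new HapiFhirJpaMigrationTasks(Collections.emptySet())
			.getAllTasks(VersionEnum.values())
			.forEach(migrator::addTask);
		migrator.migrate();
	}
}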
@@ -40,8 +40,6 @@ import static org.apache.commons.lang3.StringUtils.isBlank;
 public class FlywayMigrator {
 
 	private static final Logger ourLog = LoggerFactory.getLogger(FlywayMigrator.class);
-	// FIXME KHS
-	private static int ourVersion = 0;
 
 	private DriverTypeEnum myDriverType;
 	private String myConnectionUrl;
@@ -68,7 +66,9 @@ public class FlywayMigrator {
 	}
 
 	public void addTask(BaseTask<?> theTask) {
-		myTasks.add(new FlywayMigration(theTask, this));
+		if (!theTask.isLogMessage()) {
+			myTasks.add(new FlywayMigration(theTask, this));
+		}
 	}
 
 	public void setDryRun(boolean theDryRun) {
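With this guard, message-only tasks are never registered as Flyway migrations; LogStartSectionWithMessageTask overrides isLogMessage() to return true later in this commit, so banner tasks are silently skipped. A fragment to illustrate:

FlywayMigrator migrator = new FlywayMigrator();
// Skipped: isLogMessage() == true, so no FlywayMigration is created for this task
migrator.addTask(new LogStartSectionWithMessageTask("Starting section"));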
@@ -93,7 +93,6 @@ public class FlywayMigrator {
 		theTasks.forEach(this::addTask);
 	}
 
-
 	public void setNoColumnShrink(boolean theNoColumnShrink) {
 		myNoColumnShrink = theNoColumnShrink;
 	}
@@ -170,5 +170,7 @@ public abstract class BaseTask<T extends BaseTask> {
 		}
 	}
 
-
+	public boolean isLogMessage() {
+		return false;
+	}
 }
@@ -28,8 +28,8 @@ public class LogStartSectionWithMessageTask extends BaseTask {
 	private static final Logger ourLog = LoggerFactory.getLogger(LogStartSectionWithMessageTask.class);
 	private final String myMessage;
 
-	public LogStartSectionWithMessageTask(String theRelease, String theVersion, String theMessage) {
-		super(theRelease, theVersion);
+	public LogStartSectionWithMessageTask(String theMessage) {
+		super(null, null);
 		myMessage = theMessage;
 	}
@@ -45,4 +45,9 @@ public class LogStartSectionWithMessageTask extends BaseTask {
 		ourLog.info(myMessage);
 		ourLog.info(StringUtils.leftPad("", myMessage.length(), "*"));
 	}
+
+	@Override
+	public boolean isLogMessage() {
+		return true;
+	}
 }
@@ -247,7 +247,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 		termValueSetTable
 			.addIndex("IDX_VALUESET_URL")
 			.unique(true)
-			.withColumns("20190722.30.1", "URL");
+			.withColumns("20190722.31.1", "URL");
 		termValueSetTable.addColumn("RES_ID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
 		termValueSetTable
 			.addForeignKey("FK_TRMVALUESET_RES")
@@ -265,7 +265,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 		termValueSetTableChange
 			.addIndex("IDX_VALUESET_EXP_STATUS")
 			.unique(false)
-			.withColumns("20190722.34.1", "EXPANSION_STATUS");
+			.withColumns("20190722.35.1", "EXPANSION_STATUS");
 
 		// TermValueSetConcept
 		version.startSectionWithMessage("Processing table: TRM_VALUESET_CONCEPT");
@@ -346,8 +346,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 		version.onTable("HFJ_SPIDX_STRING").addIndex("IDX_SP_STRING_HASH_IDENT").unique(false).withColumns("20190905.12.1", "HASH_IDENTITY");
 		version.onTable("HFJ_SPIDX_COORDS").modifyColumn("RES_TYPE").nonNullable().withType("20190905.13", BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
 		version.onTable("HFJ_SPIDX_QUANTITY").modifyColumn("RES_TYPE").nonNullable().withType("20190905.14", BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
-		version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("20190905.13", "HASH_UNITS_AND_VALPREFIX");
-		version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("20190905.14", "HASH_VALPREFIX");
+		version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("20190905.14.1", "HASH_UNITS_AND_VALPREFIX");
+		version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("20190905.14.2", "HASH_VALPREFIX");
 		version.onTable("HFJ_SPIDX_NUMBER").modifyColumn("RES_TYPE").nonNullable().withType("20190905.15", BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
 		version.onTable("HFJ_SPIDX_TOKEN").modifyColumn("RES_TYPE").nonNullable().withType("20190905.16", BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
 		version.onTable("HFJ_SPIDX_URI").modifyColumn("RES_TYPE").nonNullable().withType("20190905.17", BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
@@ -443,11 +443,11 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 			.type(AddColumnTask.ColumnTypeEnum.LONG);
 		if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) {
 			spidxDate
-				.dropIndex("20180903.4", "IDX_SP_TOKEN");
+				.dropIndex("20180903.4.1", "IDX_SP_TOKEN");
 			spidxDate
 				.addIndex("IDX_SP_DATE_HASH")
 				.unique(false)
-				.withColumns("20180903.4.1","HASH_IDENTITY", "SP_VALUE_LOW", "SP_VALUE_HIGH");
+				.withColumns("20180903.4.2","HASH_IDENTITY", "SP_VALUE_LOW", "SP_VALUE_HIGH");
 			spidxDate
 				.dropIndex("20180903.5", "IDX_SP_DATE");
 			spidxDate
@@ -817,7 +817,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 			.addSql(DriverTypeEnum.MSSQL_2012, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)")
 			.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT");
 
-		version.onTable("HFJ_IDX_CMP_STRING_UNIQ").modifyColumn("IDX_STRING").nonNullable().withType("20180907.6", BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
+		version.onTable("HFJ_IDX_CMP_STRING_UNIQ").modifyColumn("IDX_STRING").nonNullable().withType("20180907.6.1", BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
 
 	}
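The renumbered keys in the hunks above ("20190722.30.1" to "20190722.31.1", "20190905.13"/"20190905.14" to "20190905.14.1"/"20190905.14.2", and so on) appear to resolve collisions: each of these strings becomes a Flyway migration version, and Flyway requires every version to be unique, so a key such as "20190905.13" cannot be shared by the HFJ_SPIDX_COORDS column change and the HFJ_SPIDX_QUANTITY column drop.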
@@ -68,6 +68,21 @@ public class BaseMigrationTasks<T extends Enum> {
 		return new Builder(theRelease.name(), sink);
 	}
 
+	public List<BaseTask<?>> getAllTasks(T[] theVersionEnumValues) {
+		List<BaseTask<?>> retval = new ArrayList<>();
+		for (T nextVersion : theVersionEnumValues) {
+			retval.add(new LogStartSectionWithMessageTask("------------------------------------------------"));
+			retval.add(new LogStartSectionWithMessageTask("Starting migrations for version " + nextVersion));
+			retval.add(new LogStartSectionWithMessageTask("------------------------------------------------"));
+			Collection<BaseTask<?>> nextValues = myTasks.get(nextVersion);
+			if (nextValues != null) {
+				retval.addAll(nextValues);
+			}
+		}
+
+		return retval;
+	}
+
 	public interface IAcceptsTasks {
 		void addTask(BaseTask<?> theTask);
 	}
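A small illustration of what the new getAllTasks yields: a three-task banner per version followed by that version's registered migrations (the enum constants here are assumed examples; note that FlywayMigrator.addTask, changed earlier in this commit, filters the banner tasks back out when building the Flyway migration list):

List<BaseTask<?>> tasks = new HapiFhirJpaMigrationTasks(Collections.emptySet())
	.getAllTasks(new VersionEnum[]{VersionEnum.V3_4_0, VersionEnum.V3_5_0});
// 3 banner tasks for V3_4_0, then its migrations, then 3 banner tasks for V3_5_0, then its migrations
tasks.forEach(task -> System.out.println(task.getClass().getSimpleName()));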
@@ -106,7 +121,7 @@ public class BaseMigrationTasks<T extends Enum> {
 
 		public Builder startSectionWithMessage(String theMessage) {
 			Validate.notBlank(theMessage);
-			addTask(new LogStartSectionWithMessageTask(myRelease, "log message", theMessage));
+			addTask(new LogStartSectionWithMessageTask(theMessage));
 			return this;
 		}