Merge remote-tracking branch 'remotes/origin/master' into ks-20191119-scheduler

This commit is contained in:
Ken Stevens 2019-11-27 14:20:15 -05:00
commit 319eead414
63 changed files with 2330 additions and 1006 deletions

View File

@ -248,6 +248,11 @@
<artifactId>jansi</artifactId>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId>
</dependency>
<!-- Test Deps -->
<dependency>
<groupId>org.awaitility</groupId>

View File

@ -161,7 +161,7 @@ public abstract class BaseApp {
commands.add(new IgPackUploader());
commands.add(new ExportConceptMapToCsvCommand());
commands.add(new ImportCsvToConceptMapCommand());
commands.add(new HapiMigrateDatabaseCommand());
commands.add(new HapiFlywayMigrateDatabaseCommand());
return commands;
}

View File

@ -21,7 +21,7 @@ package ca.uhn.fhir.cli;
*/
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.Migrator;
import ca.uhn.fhir.jpa.migrate.FlywayMigrator;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
@ -35,10 +35,12 @@ import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.defaultString;
public abstract class BaseMigrateDatabaseCommand<T extends Enum> extends BaseCommand {
public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends BaseCommand {
private static final String MIGRATE_DATABASE = "migrate-database";
public static final String MIGRATE_DATABASE = "migrate-database";
private Set<String> myFlags;
private String myMigrationTableName;
protected Set<String> getFlags() {
return myFlags;
@ -46,7 +48,7 @@ public abstract class BaseMigrateDatabaseCommand<T extends Enum> extends BaseCom
@Override
public String getCommandDescription() {
return "This command migrates a HAPI FHIR JPA database from one version of HAPI FHIR to a newer version";
return "This command migrates a HAPI FHIR JPA database to the current version";
}
protected abstract List<T> provideAllowedVersions();
@ -74,8 +76,6 @@ public abstract class BaseMigrateDatabaseCommand<T extends Enum> extends BaseCom
addRequiredOption(retVal, "u", "url", "URL", "The JDBC database URL");
addRequiredOption(retVal, "n", "username", "Username", "The JDBC database username");
addRequiredOption(retVal, "p", "password", "Password", "The JDBC database password");
addRequiredOption(retVal, "f", "from", "Version", "The database schema version to migrate FROM");
addRequiredOption(retVal, "t", "to", "Version", "The database schema version to migrate TO");
addRequiredOption(retVal, "d", "driver", "Driver", "The database driver to use (Options are " + driverOptions() + ")");
addOptionalOption(retVal, "x", "flags", "Flags", "A comma-separated list of any specific migration flags (these flags are version specific, see migrator documentation for details)");
addOptionalOption(retVal, null, "no-column-shrink", false, "If this flag is set, the system will not attempt to reduce the length of columns. This is useful in environments with a lot of existing data, where shrinking a column can take a very long time.");
@ -101,11 +101,6 @@ public abstract class BaseMigrateDatabaseCommand<T extends Enum> extends BaseCom
throw new ParseException("Invalid driver type \"" + driverTypeString + "\". Valid values are: " + driverOptions());
}
T from = getAndParseOptionEnum(theCommandLine, "f", provideVersionEnumType(), true, null);
validateVersionSupported(from);
T to = getAndParseOptionEnum(theCommandLine, "t", provideVersionEnumType(), true, null);
validateVersionSupported(to);
boolean dryRun = theCommandLine.hasOption("r");
boolean noColumnShrink = theCommandLine.hasOption("no-column-shrink");
@ -115,23 +110,20 @@ public abstract class BaseMigrateDatabaseCommand<T extends Enum> extends BaseCom
.filter(StringUtils::isNotBlank)
.collect(Collectors.toSet());
Migrator migrator = new Migrator();
FlywayMigrator migrator = new FlywayMigrator(myMigrationTableName);
migrator.setConnectionUrl(url);
migrator.setDriverType(driverType);
migrator.setUsername(username);
migrator.setPassword(password);
migrator.setDryRun(dryRun);
migrator.setNoColumnShrink(noColumnShrink);
addTasks(migrator, from, to);
addTasks(migrator);
migrator.migrate();
}
private void validateVersionSupported(T theFrom) throws ParseException {
if (provideAllowedVersions().contains(theFrom) == false) {
throw new ParseException("The version " + theFrom + " is not supported for migration");
}
}
protected abstract void addTasks(FlywayMigrator theMigrator);
protected abstract void addTasks(Migrator theMigrator, T theFrom, T theTo);
public void setMigrationTableName(String theMigrationTableName) {
myMigrationTableName = theMigrationTableName;
}
}

View File

@ -20,15 +20,18 @@ package ca.uhn.fhir.cli;
* #L%
*/
import ca.uhn.fhir.jpa.migrate.Migrator;
import ca.uhn.fhir.jpa.migrate.FlywayMigrator;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.ParseException;
import java.util.Arrays;
import java.util.List;
public class HapiMigrateDatabaseCommand extends BaseMigrateDatabaseCommand<VersionEnum> {
public class HapiFlywayMigrateDatabaseCommand extends BaseFlywayMigrateDatabaseCommand<VersionEnum> {
@Override
protected List<VersionEnum> provideAllowedVersions() {
@ -41,8 +44,14 @@ public class HapiMigrateDatabaseCommand extends BaseMigrateDatabaseCommand<Versi
}
@Override
protected void addTasks(Migrator theMigrator, VersionEnum theFrom, VersionEnum theTo) {
List<BaseTask<?>> tasks = new HapiFhirJpaMigrationTasks(getFlags()).getTasks(theFrom, theTo);
tasks.forEach(theMigrator::addTask);
protected void addTasks(FlywayMigrator theMigrator) {
List<BaseTask<?>> tasks = new HapiFhirJpaMigrationTasks(getFlags()).getAllTasks(VersionEnum.values());
theMigrator.addTasks(tasks);
}
@Override
public void run(CommandLine theCommandLine) throws ParseException {
setMigrationTableName(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME);
super.run(theCommandLine);
}
}

View File

@ -1,7 +1,7 @@
package ca.uhn.fhir.cli;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.util.VersionEnum;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import com.google.common.base.Charsets;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
@ -26,13 +26,11 @@ import java.util.List;
import java.util.Map;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.*;
public class HapiMigrateDatabaseCommandTest {
public class HapiFlywayMigrateDatabaseCommandTest {
private static final Logger ourLog = LoggerFactory.getLogger(HapiMigrateDatabaseCommandTest.class);
private static final Logger ourLog = LoggerFactory.getLogger(HapiFlywayMigrateDatabaseCommandTest.class);
public static final String DB_DIRECTORY = "target/h2_test";
static {
@ -40,7 +38,7 @@ public class HapiMigrateDatabaseCommandTest {
}
@Test
public void testMigrate_340_current() throws IOException {
public void testMigrateFrom340() throws IOException, SQLException {
File location = getLocation("migrator_h2_test_340_current");
@ -57,15 +55,15 @@ public class HapiMigrateDatabaseCommandTest {
ourLog.info("**********************************************");
String[] args = new String[]{
"migrate-database",
BaseFlywayMigrateDatabaseCommand.MIGRATE_DATABASE,
"-d", "H2_EMBEDDED",
"-u", url,
"-n", "",
"-p", "",
"-f", "V3_4_0",
"-t", VersionEnum.latestVersion().toString()
"-p", ""
};
assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RES_REINDEX_JOB"));
App.main(args);
assertTrue(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RES_REINDEX_JOB"));
connectionProperties.getTxTemplate().execute(t -> {
JdbcTemplate jdbcTemplate = connectionProperties.newJdbcTemplate();
@ -79,6 +77,33 @@ public class HapiMigrateDatabaseCommandTest {
});
}
@Test
public void testMigrateFromEmptySchema() throws IOException, SQLException {
File location = getLocation("migrator_h2_test_empty_current");
String url = "jdbc:h2:" + location.getAbsolutePath() + ";create=true";
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
ourLog.info("**********************************************");
ourLog.info("Starting Migration...");
ourLog.info("**********************************************");
String[] args = new String[]{
BaseFlywayMigrateDatabaseCommand.MIGRATE_DATABASE,
"-d", "H2_EMBEDDED",
"-u", url,
"-n", "",
"-p", ""
};
assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RESOURCE"));
assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BLK_EXPORT_JOB"));
App.main(args);
assertTrue(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RESOURCE")); // Early table
assertTrue(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_BLK_EXPORT_JOB")); // Late table
}
@NotNull
private File getLocation(String theDatabaseName) throws IOException {
File directory = new File(DB_DIRECTORY);
@ -89,111 +114,6 @@ public class HapiMigrateDatabaseCommandTest {
return new File(DB_DIRECTORY + "/" + theDatabaseName);
}
@Test
public void testMigrate_340_370() throws IOException {
File location = getLocation("migrator_h2_test_340_360");
String url = "jdbc:h2:" + location.getAbsolutePath() + ";create=true";
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
String initSql = "/persistence_create_h2_340.sql";
executeSqlStatements(connectionProperties, initSql);
seedDatabase340(connectionProperties);
ourLog.info("**********************************************");
ourLog.info("Done Setup, Starting Migration...");
ourLog.info("**********************************************");
String[] args = new String[]{
"migrate-database",
"-d", "H2_EMBEDDED",
"-u", url,
"-n", "",
"-p", "",
"-f", "V3_4_0",
"-t", "V3_7_0"
};
App.main(args);
connectionProperties.getTxTemplate().execute(t -> {
JdbcTemplate jdbcTemplate = connectionProperties.newJdbcTemplate();
List<Map<String, Object>> values = jdbcTemplate.queryForList("SELECT * FROM hfj_spidx_token");
assertEquals(1, values.size());
assertEquals("identifier", values.get(0).get("SP_NAME"));
assertEquals("12345678", values.get(0).get("SP_VALUE"));
assertTrue(values.get(0).keySet().contains("HASH_IDENTITY"));
assertEquals(7001889285610424179L, values.get(0).get("HASH_IDENTITY"));
return null;
});
}
@Test
public void testMigrate_340_350() throws IOException {
File location = getLocation("migrator_h2_test_340_350");
String url = "jdbc:h2:" + location.getAbsolutePath() + ";create=true";
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
String initSql = "/persistence_create_h2_340.sql";
executeSqlStatements(connectionProperties, initSql);
seedDatabase340(connectionProperties);
ourLog.info("**********************************************");
ourLog.info("Done Setup, Starting Dry Run...");
ourLog.info("**********************************************");
String[] args = new String[]{
"migrate-database",
"-d", "H2_EMBEDDED",
"-u", url,
"-n", "",
"-p", "",
"-r",
"-f", "V3_4_0",
"-t", "V3_5_0"
};
App.main(args);
connectionProperties.getTxTemplate().execute(t -> {
JdbcTemplate jdbcTemplate = connectionProperties.newJdbcTemplate();
List<Map<String, Object>> values = jdbcTemplate.queryForList("SELECT * FROM hfj_spidx_token");
assertFalse(values.get(0).keySet().contains("HASH_IDENTITY"));
return null;
});
ourLog.info("**********************************************");
ourLog.info("Done Setup, Starting Migration...");
ourLog.info("**********************************************");
args = new String[]{
"migrate-database",
"-d", "H2_EMBEDDED",
"-u", url,
"-n", "",
"-p", "",
"-f", "V3_4_0",
"-t", "V3_5_0"
};
App.main(args);
connectionProperties.getTxTemplate().execute(t -> {
JdbcTemplate jdbcTemplate = connectionProperties.newJdbcTemplate();
List<Map<String, Object>> values = jdbcTemplate.queryForList("SELECT * FROM hfj_spidx_token");
assertEquals(1, values.size());
assertEquals("identifier", values.get(0).get("SP_NAME"));
assertEquals("12345678", values.get(0).get("SP_VALUE"));
assertTrue(values.get(0).keySet().contains("HASH_IDENTITY"));
assertEquals(7001889285610424179L, values.get(0).get("HASH_IDENTITY"));
return null;
});
}
private void seedDatabase340(DriverTypeEnum.ConnectionProperties theConnectionProperties) {
theConnectionProperties.getTxTemplate().execute(t -> {
JdbcTemplate jdbcTemplate = theConnectionProperties.newJdbcTemplate();
@ -306,51 +226,9 @@ public class HapiMigrateDatabaseCommandTest {
}
@Test
public void testMigrate_340_350_NoMigrateHashes() throws IOException {
File location = getLocation("migrator_h2_test_340_350_nmh");
String url = "jdbc:h2:" + location.getAbsolutePath() + ";create=true";
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
String initSql = "/persistence_create_h2_340.sql";
executeSqlStatements(connectionProperties, initSql);
seedDatabase340(connectionProperties);
ourLog.info("**********************************************");
ourLog.info("Done Setup, Starting Migration...");
ourLog.info("**********************************************");
String[] args = new String[]{
"migrate-database",
"-d", "H2_EMBEDDED",
"-u", url,
"-n", "",
"-p", "",
"-f", "V3_4_0",
"-t", "V3_5_0",
"-x", "no-migrate-350-hashes"
};
App.main(args);
connectionProperties.getTxTemplate().execute(t -> {
JdbcTemplate jdbcTemplate = connectionProperties.newJdbcTemplate();
List<Map<String, Object>> values = jdbcTemplate.queryForList("SELECT * FROM hfj_spidx_token");
assertEquals(1, values.size());
assertEquals("identifier", values.get(0).get("SP_NAME"));
assertEquals("12345678", values.get(0).get("SP_VALUE"));
assertEquals(null, values.get(0).get("HASH_IDENTITY"));
return null;
});
}
private void executeSqlStatements(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theInitSql) throws
IOException {
String script = IOUtils.toString(HapiMigrateDatabaseCommandTest.class.getResourceAsStream(theInitSql), Charsets.UTF_8);
String script = IOUtils.toString(HapiFlywayMigrateDatabaseCommandTest.class.getResourceAsStream(theInitSql), Charsets.UTF_8);
List<String> scriptStatements = new ArrayList<>(Arrays.asList(script.split("\n")));
for (int i = 0; i < scriptStatements.size(); i++) {
String nextStatement = scriptStatements.get(i);

View File

@ -465,60 +465,23 @@ public class ResourceProviderCustomSearchParamR4Test extends BaseResourceProvide
Interceptor interceptor = new Interceptor();
myInterceptorRegistry.registerInterceptor(interceptor);
try {
// Add a custom search parameter
SearchParameter fooSp = new SearchParameter();
fooSp.addBase("Questionnaire");
fooSp.setCode("item-text");
fooSp.setName("item-text");
fooSp.setType(Enumerations.SearchParamType.STRING);
fooSp.setTitle("FOO SP");
fooSp.setExpression("Questionnaire.item.text | Questionnaire.item.item.text | Questionnaire.item.item.item.text");
fooSp.setXpathUsage(org.hl7.fhir.r4.model.SearchParameter.XPathUsageType.NORMAL);
fooSp.setStatus(org.hl7.fhir.r4.model.Enumerations.PublicationStatus.ACTIVE);
mySearchParameterDao.create(fooSp, mySrd);
mySearchParamRegistry.forceRefresh();
int textIndex = 0;
List<Long> ids = new ArrayList<>();
for (int i = 0; i < 200; i++) {
//Lots and lots of matches
Questionnaire q = new Questionnaire();
q
.addItem()
.setText("Section " + (textIndex++))
.addItem()
.setText("Section " + (textIndex++))
.addItem()
.setText("Section " + (textIndex++));
q
.addItem()
.setText("Section " + (textIndex++))
.addItem()
.setText("Section " + (textIndex++))
.addItem()
.setText("Section " + (textIndex++));
q
.addItem()
.setText("Section " + (textIndex++))
.addItem()
.setText("Section " + (textIndex++))
.addItem()
.setText("Section " + (textIndex++));
q
.addItem()
.setText("Section " + (textIndex++))
.addItem()
.setText("Section " + (textIndex++))
.addItem()
.setText("Section " + (textIndex++));
q
.addItem()
.setText("Section " + (textIndex++))
.addItem()
.setText("Section " + (textIndex++))
.addItem()
.setText("Section " + (textIndex++));
ids.add(myQuestionnaireDao.create(q).getId().getIdPartAsLong());
Patient q = new Patient();
q.addIdentifier().setSystem("System_" + textIndex++).setValue("FOO");
q.addIdentifier().setSystem("System_" + textIndex++).setValue("FOO");
q.addIdentifier().setSystem("System_" + textIndex++).setValue("FOO");
q.addIdentifier().setSystem("System_" + textIndex++).setValue("FOO");
q.addIdentifier().setSystem("System_" + textIndex++).setValue("FOO");
q.addIdentifier().setSystem("System_" + textIndex++).setValue("FOO");
q.addIdentifier().setSystem("System_" + textIndex++).setValue("FOO");
q.addIdentifier().setSystem("System_" + textIndex++).setValue("FOO");
q.addIdentifier().setSystem("System_" + textIndex++).setValue("FOO");
q.addIdentifier().setSystem("System_" + textIndex++).setValue("FOO");
ids.add(myPatientDao.create(q).getId().getIdPartAsLong());
}
myCaptureQueriesListener.clear();
@ -531,7 +494,7 @@ public class ResourceProviderCustomSearchParamR4Test extends BaseResourceProvide
if (bundle == null) {
bundle = ourClient
.search()
.byUrl(ourServerBase + "/Questionnaire?item-text=Section")
.byUrl(ourServerBase + "/Patient?identifier=FOO")
.returnBundle(Bundle.class)
.execute();
} else {

View File

@ -75,6 +75,11 @@
<artifactId>annotations</artifactId>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId>
</dependency>
</dependencies>
<build>

View File

@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.lang3.Validate;
@ -66,6 +67,47 @@ public enum DriverTypeEnum {
myDerby = theDerby;
}
public String getDriverClassName() {
return myDriverClassName;
}
/**
 * Returns the filename of the DDL schema-initialization script that matches this
 * database driver (e.g. {@code h2.sql} for H2).
 *
 * <p>Note: MYSQL_5_7 and MARIADB_10_1 deliberately share the same script.</p>
 *
 * @throws ConfigurationException if no script is available for this driver
 */
public String getSchemaFilename() {
String retval;
switch (this) {
case H2_EMBEDDED:
retval = "h2.sql";
break;
case DERBY_EMBEDDED:
retval = "derbytenseven.sql";
break;
case MYSQL_5_7:
case MARIADB_10_1:
retval = "mysql57.sql";
break;
case POSTGRES_9_4:
retval = "postgresql92.sql";
break;
case ORACLE_12C:
retval = "oracle12c.sql";
break;
case MSSQL_2012:
retval = "sqlserver2012.sql";
break;
default:
throw new ConfigurationException("No schema initialization script available for driver " + this);
}
return retval;
}
/**
 * Looks up the driver enum constant whose JDBC driver class name exactly matches
 * the given string.
 *
 * @param theDriverClassName fully-qualified JDBC driver class name
 * @return the matching constant, or {@code null} if no constant uses that driver class
 */
public static DriverTypeEnum fromDriverClassName(String theDriverClassName) {
for (DriverTypeEnum driverTypeEnum : DriverTypeEnum.values()) {
if (driverTypeEnum.myDriverClassName.equals(theDriverClassName)) {
return driverTypeEnum;
}
}
return null;
}
public ConnectionProperties newConnectionProperties(String theUrl, String theUsername, String thePassword) {
Driver driver;
@ -99,7 +141,7 @@ public enum DriverTypeEnum {
return new ConnectionProperties(dataSource, txTemplate, this);
}
public static class ConnectionProperties {
public static class ConnectionProperties implements AutoCloseable {
private final DriverTypeEnum myDriverType;
private final DataSource myDataSource;
@ -139,6 +181,7 @@ public enum DriverTypeEnum {
return myTxTemplate;
}
@Override
public void close() {
if (myDataSource instanceof DisposableBean) {
try {

View File

@ -0,0 +1,93 @@
package ca.uhn.fhir.jpa.migrate;
/*-
* #%L
* HAPI FHIR JPA Server - Migration
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.flywaydb.core.api.MigrationVersion;
import org.flywaydb.core.api.migration.Context;
import org.flywaydb.core.api.migration.JavaMigration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.SQLException;
import static org.apache.commons.lang3.StringUtils.isBlank;
/**
 * Adapts a single HAPI migration {@link BaseTask} to Flyway's {@link JavaMigration}
 * interface so the task can be executed and recorded by a Flyway migration run.
 */
public class FlywayMigration implements JavaMigration {
	private static final Logger ourLog = LoggerFactory.getLogger(FlywayMigration.class);

	private final BaseTask myTask;
	private final FlywayMigrator myFlywayMigrator;
	private DriverTypeEnum.ConnectionProperties myConnectionProperties;

	public FlywayMigration(BaseTask theTask, FlywayMigrator theFlywayMigrator) {
		myTask = theTask;
		myFlywayMigrator = theFlywayMigrator;
	}

	@Override
	public MigrationVersion getVersion() {
		// The wrapped task supplies its own Flyway-compatible version string
		return MigrationVersion.fromVersion(myTask.getFlywayVersion());
	}

	@Override
	public String getDescription() {
		return myTask.getDescription();
	}

	@Override
	public Integer getChecksum() {
		// NOTE(review): Flyway compares this checksum on subsequent runs, so it relies on
		// the task's hashCode() being stable across JVM restarts — confirm tasks build it
		// from their configuration (e.g. via HashCodeBuilder) rather than identity
		return myTask.hashCode();
	}

	@Override
	public boolean isUndo() {
		return false;
	}

	@Override
	public boolean canExecuteInTransaction() {
		// Tasks manage their own SQL; some statements (e.g. DDL on certain databases)
		// cannot run inside a Flyway-managed transaction
		return false;
	}

	@Override
	public void migrate(Context theContext) throws Exception {
		// Push the run-time settings of the owning migrator down into the task
		myTask.setDriverType(myFlywayMigrator.getDriverType());
		myTask.setDryRun(myFlywayMigrator.isDryRun());
		myTask.setNoColumnShrink(myFlywayMigrator.isNoColumnShrink());
		myTask.setConnectionProperties(myConnectionProperties);
		try {
			myTask.execute();
		} catch (SQLException e) {
			throw new InternalErrorException("Failure executing task \"" + taskDescription() + "\", aborting! Cause: " + e, e);
		}
	}

	// Falls back to the task's simple class name when no description was provided
	private String taskDescription() {
		String description = myTask.getDescription();
		return isBlank(description) ? myTask.getClass().getSimpleName() : description;
	}

	public void setConnectionProperties(DriverTypeEnum.ConnectionProperties theConnectionProperties) {
		myConnectionProperties = theConnectionProperties;
	}
}

View File

@ -0,0 +1,162 @@
package ca.uhn.fhir.jpa.migrate;
/*-
* #%L
* HAPI FHIR JPA Server - Migration
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.dbcp2.BasicDataSource;
import org.flywaydb.core.Flyway;
import org.flywaydb.core.api.MigrationInfoService;
import org.flywaydb.core.api.migration.JavaMigration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
/**
 * Runs HAPI FHIR schema-migration tasks through the Flyway engine, recording each
 * executed task in the configured migration-history table.
 *
 * <p>Tasks are wrapped as {@link FlywayMigration} adapters and handed to Flyway as
 * Java-based migrations.</p>
 */
public class FlywayMigrator {
	private static final Logger ourLog = LoggerFactory.getLogger(FlywayMigrator.class);

	private final String myMigrationTableName;
	// FIXED: task list is never reassigned, so it is now final
	private final List<FlywayMigration> myTasks = new ArrayList<>();
	private DriverTypeEnum myDriverType;
	private String myConnectionUrl;
	private String myUsername;
	private String myPassword;
	private boolean myDryRun;
	private boolean myNoColumnShrink;

	/**
	 * @param theMigrationTableName name of the table Flyway uses to record applied migrations
	 * @param theDataSource         connection settings are copied from this data source; its
	 *                              driver class name is mapped back to a {@link DriverTypeEnum}
	 */
	public FlywayMigrator(String theMigrationTableName, BasicDataSource theDataSource) {
		this(theMigrationTableName);
		myConnectionUrl = theDataSource.getUrl();
		myUsername = theDataSource.getUsername();
		myPassword = theDataSource.getPassword();
		String driverClassName = theDataSource.getDriverClassName();
		if (driverClassName == null) {
			ourLog.error(this.getClass().getSimpleName() + " constructed without a database driver");
		} else {
			myDriverType = DriverTypeEnum.fromDriverClassName(driverClassName);
			if (myDriverType == null) {
				ourLog.error("Unknown driver class " + driverClassName);
			}
		}
	}

	public FlywayMigrator(String theMigrationTableName) {
		myMigrationTableName = theMigrationTableName;
	}

	public void setDriverType(DriverTypeEnum theDriverType) {
		myDriverType = theDriverType;
	}

	public void setConnectionUrl(String theConnectionUrl) {
		myConnectionUrl = theConnectionUrl;
	}

	public void setUsername(String theUsername) {
		myUsername = theUsername;
	}

	public void setPassword(String thePassword) {
		myPassword = thePassword;
	}

	/** Registers a migration task; it will run in registration order. */
	public void addTask(BaseTask<?> theTask) {
		myTasks.add(new FlywayMigration(theTask, this));
	}

	public void setDryRun(boolean theDryRun) {
		myDryRun = theDryRun;
	}

	/**
	 * Executes all registered tasks against the configured database.
	 * FIXED: removed a redundant {@code catch (Exception e) { throw e; }} that only
	 * re-threw the exception unchanged.
	 */
	public void migrate() {
		try (DriverTypeEnum.ConnectionProperties connectionProperties = myDriverType.newConnectionProperties(myConnectionUrl, myUsername, myPassword)) {
			Flyway flyway = initFlyway(connectionProperties);
			flyway.migrate();
		}
	}

	private Flyway initFlyway(DriverTypeEnum.ConnectionProperties theConnectionProperties) {
		// TODO KHS Is there a way we can use datasource instead of url, username, password here
		Flyway flyway = Flyway.configure()
			.table(myMigrationTableName)
			.dataSource(myConnectionUrl, myUsername, myPassword)
			.baselineOnMigrate(true)
			.javaMigrations(myTasks.toArray(new JavaMigration[0]))
			.load();
		// Each adapter needs the live connection for the duration of the run
		for (FlywayMigration task : myTasks) {
			task.setConnectionProperties(theConnectionProperties);
		}
		return flyway;
	}

	public void addTasks(List<BaseTask<?>> theTasks) {
		theTasks.forEach(this::addTask);
	}

	public void setNoColumnShrink(boolean theNoColumnShrink) {
		myNoColumnShrink = theNoColumnShrink;
	}

	public DriverTypeEnum getDriverType() {
		return myDriverType;
	}

	public String getConnectionUrl() {
		return myConnectionUrl;
	}

	public String getUsername() {
		return myUsername;
	}

	public String getPassword() {
		return myPassword;
	}

	public boolean isDryRun() {
		return myDryRun;
	}

	public boolean isNoColumnShrink() {
		return myNoColumnShrink;
	}

	/**
	 * @return Flyway's view of applied/pending migrations, or {@code null} when the
	 * driver type is unknown (e.g. constructed from a data source with an
	 * unrecognized driver class)
	 */
	public MigrationInfoService getMigrationInfo() {
		if (myDriverType == null) {
			return null;
		}
		try (DriverTypeEnum.ConnectionProperties connectionProperties = myDriverType.newConnectionProperties(myConnectionUrl, myUsername, myPassword)) {
			Flyway flyway = initFlyway(connectionProperties);
			return flyway.info();
		}
	}

	@VisibleForTesting
	public void removeAllTasksForUnitTest() {
		myTasks.clear();
	}
}

View File

@ -441,6 +441,9 @@ public class JdbcUtils {
if ("SYSTEM TABLE".equalsIgnoreCase(tableType)) {
continue;
}
if (SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME.equalsIgnoreCase(tableName)) {
continue;
}
columnNames.add(tableName);
}

View File

@ -0,0 +1,97 @@
package ca.uhn.fhir.jpa.migrate;
/*-
* #%L
* HAPI FHIR JPA Server - Migration
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import org.apache.commons.dbcp2.BasicDataSource;
import org.flywaydb.core.api.MigrationInfo;
import org.flywaydb.core.api.MigrationInfoService;
import org.hibernate.cfg.AvailableSettings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.List;
import java.util.Properties;
/**
 * Validates and migrates the database schema using a {@link FlywayMigrator}.
 *
 * <p>When hibernate is configured to manage the schema itself
 * ({@code hbm2ddl.auto=update}), both validation and migration are skipped.</p>
 */
public class SchemaMigrator {
	private static final Logger ourLog = LoggerFactory.getLogger(SchemaMigrator.class);
	public static final String HAPI_FHIR_MIGRATION_TABLENAME = "FLY_HFJ_MIGRATION";
	private final BasicDataSource myDataSource;
	private final FlywayMigrator myMigrator;
	private final boolean mySkipValidation;

	/**
	 * @param theMigrationTableName name of the Flyway migration-history table
	 * @param theDataSource         database to validate/migrate
	 * @param jpaProperties         hibernate properties; only {@code hbm2ddl.auto} is inspected
	 * @param theMigrationTasks     the full ordered list of migration tasks
	 */
	public SchemaMigrator(String theMigrationTableName, BasicDataSource theDataSource, Properties jpaProperties, List<BaseTask<?>> theMigrationTasks) {
		myDataSource = theDataSource;
		// getProperty returns null when absent, so the explicit containsKey check was redundant
		mySkipValidation = "update".equals(jpaProperties.getProperty(AvailableSettings.HBM2DDL_AUTO));
		myMigrator = new FlywayMigrator(theMigrationTableName, theDataSource);
		myMigrator.addTasks(theMigrationTasks);
	}

	/**
	 * Fails fast with a {@link ConfigurationException} when the schema is out of date
	 * or the database is unreachable; no-op in hibernate auto-update mode.
	 */
	public void validate() {
		if (mySkipValidation) {
			ourLog.warn("Database running in hibernate auto-update mode. Skipping schema validation.");
			return;
		}
		// The connection is opened only to fail fast with a clear error if the database is unreachable
		try (Connection connection = myDataSource.getConnection()) {
			MigrationInfoService migrationInfo = myMigrator.getMigrationInfo();
			if (migrationInfo == null) {
				// FIXED: previously this path threw a NullPointerException when the migrator
				// could not determine the driver type from the data source
				throw new ConfigurationException("Unable to determine migration status of " + myDataSource.getUrl() + ": unknown database driver");
			}
			if (migrationInfo.pending().length > 0) {
				throw new ConfigurationException("The database schema for " + myDataSource.getUrl() + " is out of date. " +
					"Current database schema version is " + getCurrentVersion(migrationInfo) + ". Schema version required by application is " +
					getLastVersion(migrationInfo) + ". Please run the database migrator.");
			}
			ourLog.info("Database schema confirmed at expected version " + getCurrentVersion(migrationInfo));
		} catch (SQLException e) {
			throw new ConfigurationException("Unable to connect to " + myDataSource.toString(), e);
		}
	}

	/** Applies all pending migration tasks; no-op in hibernate auto-update mode. */
	public void migrate() {
		if (mySkipValidation) {
			ourLog.warn("Database running in hibernate auto-update mode. Skipping schema migration.");
			return;
		}
		myMigrator.migrate();
	}

	// "unknown" covers a fresh database where no migration has been recorded yet
	private String getCurrentVersion(MigrationInfoService theMigrationInfo) {
		MigrationInfo migrationInfo = theMigrationInfo.current();
		if (migrationInfo == null) {
			return "unknown";
		}
		return migrationInfo.getVersion().toString();
	}

	// The last pending migration is the version the application requires
	private String getLastVersion(MigrationInfoService theMigrationInfo) {
		MigrationInfo[] pending = theMigrationInfo.pending();
		if (pending.length > 0) {
			return pending[pending.length - 1].getVersion().toString();
		}
		return "unknown";
	}
}

View File

@ -31,12 +31,21 @@ public class AddColumnTask extends BaseTableColumnTypeTask<AddColumnTask> {
private static final Logger ourLog = LoggerFactory.getLogger(AddColumnTask.class);
public AddColumnTask(String theProductVersion, String theSchemaVersion) {
super(theProductVersion, theSchemaVersion);
}
@Override
public void validate() {
super.validate();
setDescription("Add column " + getColumnName() + " on table " + getTableName());
}
@Override
public void execute() throws SQLException {
Set<String> columnNames = JdbcUtils.getColumnNames(getConnectionProperties(), getTableName());
if (columnNames.contains(getColumnName())) {
ourLog.info("Column {} already exists on table {} - No action performed", getColumnName(), getTableName());
logInfo(ourLog, "Column {} already exists on table {} - No action performed", getColumnName(), getTableName());
return;
}
@ -59,7 +68,7 @@ public class AddColumnTask extends BaseTableColumnTypeTask<AddColumnTask> {
throw new IllegalStateException();
}
ourLog.info("Adding column {} of type {} to table {}", getColumnName(), getSqlType(), getTableName());
logInfo(ourLog, "Adding column {} of type {} to table {}", getColumnName(), getSqlType(), getTableName());
executeSql(getTableName(), sql);
}

View File

@ -22,6 +22,8 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -37,6 +39,10 @@ public class AddForeignKeyTask extends BaseTableColumnTask<AddForeignKeyTask> {
private String myForeignTableName;
private String myForeignColumnName;
public AddForeignKeyTask(String theProductVersion, String theSchemaVersion) {
super(theProductVersion, theSchemaVersion);
}
public void setConstraintName(String theConstraintName) {
myConstraintName = theConstraintName;
}
@ -56,6 +62,7 @@ public class AddForeignKeyTask extends BaseTableColumnTask<AddForeignKeyTask> {
Validate.isTrue(isNotBlank(myConstraintName));
Validate.isTrue(isNotBlank(myForeignTableName));
Validate.isTrue(isNotBlank(myForeignColumnName));
setDescription("Add foreign key " + myConstraintName + " from column " + getColumnName() + " of table " + getTableName() + " to column " + myForeignColumnName + " of table " + myForeignTableName);
}
@Override
@ -63,7 +70,7 @@ public class AddForeignKeyTask extends BaseTableColumnTask<AddForeignKeyTask> {
Set<String> existing = JdbcUtils.getForeignKeys(getConnectionProperties(), myForeignTableName, getTableName());
if (existing.contains(myConstraintName)) {
ourLog.info("Already have constraint named {} - No action performed", myConstraintName);
logInfo(ourLog, "Already have constraint named {} - No action performed", myConstraintName);
return;
}
@ -97,4 +104,29 @@ public class AddForeignKeyTask extends BaseTableColumnTask<AddForeignKeyTask> {
}
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (theO == null || getClass() != theO.getClass()) return false;
AddForeignKeyTask that = (AddForeignKeyTask) theO;
return new EqualsBuilder()
.appendSuper(super.equals(theO))
.append(myConstraintName, that.myConstraintName)
.append(myForeignTableName, that.myForeignTableName)
.append(myForeignColumnName, that.myForeignColumnName)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.appendSuper(super.hashCode())
.append(myConstraintName)
.append(myForeignTableName)
.append(myForeignColumnName)
.toHashCode();
}
}

View File

@ -22,6 +22,8 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -36,13 +38,15 @@ public class AddIdGeneratorTask extends BaseTask<AddIdGeneratorTask> {
private static final Logger ourLog = LoggerFactory.getLogger(AddIdGeneratorTask.class);
private final String myGeneratorName;
public AddIdGeneratorTask(String theGeneratorName) {
public AddIdGeneratorTask(String theProductVersion, String theSchemaVersion, String theGeneratorName) {
super(theProductVersion, theSchemaVersion);
myGeneratorName = theGeneratorName;
}
@Override
public void validate() {
Validate.notBlank(myGeneratorName);
setDescription("Add id generator " + myGeneratorName);
}
@Override
@ -89,7 +93,7 @@ public class AddIdGeneratorTask extends BaseTask<AddIdGeneratorTask> {
.collect(Collectors.toSet());
ourLog.debug("Currently have sequences: {}", sequenceNames);
if (sequenceNames.contains(myGeneratorName.toLowerCase())) {
ourLog.info("Sequence {} already exists - No action performed", myGeneratorName);
logInfo(ourLog, "Sequence {} already exists - No action performed", myGeneratorName);
return;
}
@ -98,4 +102,23 @@ public class AddIdGeneratorTask extends BaseTask<AddIdGeneratorTask> {
}
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (!(theO instanceof AddIdGeneratorTask)) return false;
AddIdGeneratorTask that = (AddIdGeneratorTask) theO;
return new EqualsBuilder()
.append(myGeneratorName, that.myGeneratorName)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(myGeneratorName)
.toHashCode();
}
}

View File

@ -22,6 +22,8 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.thymeleaf.util.StringUtils;
@ -39,6 +41,10 @@ public class AddIndexTask extends BaseTableTask<AddIndexTask> {
private List<String> myColumns;
private Boolean myUnique;
public AddIndexTask(String theProductVersion, String theSchemaVersion) {
super(theProductVersion, theSchemaVersion);
}
public void setIndexName(String theIndexName) {
myIndexName = StringUtils.toUpperCase(theIndexName, Locale.US);
}
@ -55,19 +61,20 @@ public class AddIndexTask extends BaseTableTask<AddIndexTask> {
public void validate() {
super.validate();
Validate.notBlank(myIndexName, "Index name not specified");
Validate.isTrue(myColumns.size() > 0, "Columns not specified");
Validate.isTrue(myColumns.size() > 0, "Columns not specified for AddIndexTask " + myIndexName + " on table " + getTableName());
Validate.notNull(myUnique, "Uniqueness not specified");
setDescription("Add " + myIndexName + " index to table " + getTableName());
}
@Override
public void execute() throws SQLException {
Set<String> indexNames = JdbcUtils.getIndexNames(getConnectionProperties(), getTableName());
if (indexNames.contains(myIndexName)) {
ourLog.info("Index {} already exists on table {} - No action performed", myIndexName, getTableName());
logInfo(ourLog, "Index {} already exists on table {} - No action performed", myIndexName, getTableName());
return;
}
ourLog.info("Going to add a {} index named {} on table {} for columns {}", (myUnique ? "UNIQUE" : "NON-UNIQUE"), myIndexName, getTableName(), myColumns);
logInfo(ourLog, "Going to add a {} index named {} on table {} for columns {}", (myUnique ? "UNIQUE" : "NON-UNIQUE"), myIndexName, getTableName(), myColumns);
String unique = myUnique ? "unique " : "";
String columns = String.join(", ", myColumns);
@ -88,4 +95,30 @@ public class AddIndexTask extends BaseTableTask<AddIndexTask> {
public void setColumns(String... theColumns) {
setColumns(Arrays.asList(theColumns));
}
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (theO == null || getClass() != theO.getClass()) return false;
AddIndexTask that = (AddIndexTask) theO;
return new EqualsBuilder()
.appendSuper(super.equals(theO))
.append(myIndexName, that.myIndexName)
.append(myColumns, that.myColumns)
.append(myUnique, that.myUnique)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.appendSuper(super.hashCode())
.append(myIndexName)
.append(myColumns)
.append(myUnique)
.toHashCode();
}
}

View File

@ -22,6 +22,8 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -36,6 +38,16 @@ public class AddTableByColumnTask extends BaseTableTask<AddTableByColumnTask> {
private List<AddColumnTask> myAddColumnTasks = new ArrayList<>();
private String myPkColumn;
public AddTableByColumnTask(String theProductVersion, String theSchemaVersion) {
super(theProductVersion, theSchemaVersion);
}
@Override
public void validate() {
super.validate();
setDescription("Add table " + getTableName());
}
public void addAddColumnTask(AddColumnTask theTask) {
Validate.notNull(theTask);
myAddColumnTasks.add(theTask);
@ -49,7 +61,7 @@ public class AddTableByColumnTask extends BaseTableTask<AddTableByColumnTask> {
public void execute() throws SQLException {
if (JdbcUtils.getTableNames(getConnectionProperties()).contains(getTableName())) {
ourLog.info("Already have table named {} - No action performed", getTableName());
logInfo(ourLog, "Already have table named {} - No action performed", getTableName());
return;
}
@ -91,4 +103,28 @@ public class AddTableByColumnTask extends BaseTableTask<AddTableByColumnTask> {
executeSql(getTableName(), sb.toString());
}
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (theO == null || getClass() != theO.getClass()) return false;
AddTableByColumnTask that = (AddTableByColumnTask) theO;
return new EqualsBuilder()
.appendSuper(super.equals(theO))
.append(myAddColumnTasks, that.myAddColumnTasks)
.append(myPkColumn, that.myPkColumn)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.appendSuper(super.hashCode())
.append(myAddColumnTasks)
.append(myPkColumn)
.toHashCode();
}
}

View File

@ -23,6 +23,8 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -37,6 +39,16 @@ public class AddTableRawSqlTask extends BaseTableTask<AddTableRawSqlTask> {
private Map<DriverTypeEnum, List<String>> myDriverToSqls = new HashMap<>();
private List<String> myDriverNeutralSqls = new ArrayList<>();
public AddTableRawSqlTask(String theProductVersion, String theSchemaVersion) {
super(theProductVersion, theSchemaVersion);
}
@Override
public void validate() {
super.validate();
setDescription("Add table using raw sql");
}
public void addSql(DriverTypeEnum theDriverType, @Language("SQL") String theSql) {
Validate.notNull(theDriverType);
Validate.notBlank(theSql);
@ -49,14 +61,14 @@ public class AddTableRawSqlTask extends BaseTableTask<AddTableRawSqlTask> {
public void execute() throws SQLException {
Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
if (tableNames.contains(getTableName())) {
ourLog.info("Table {} already exists - No action performed", getTableName());
logInfo(ourLog, "Table {} already exists - No action performed", getTableName());
return;
}
List<String> sqlStatements = myDriverToSqls.computeIfAbsent(getDriverType(), t -> new ArrayList<>());
sqlStatements.addAll(myDriverNeutralSqls);
ourLog.info("Going to create table {} using {} SQL statements", getTableName(), sqlStatements.size());
logInfo(ourLog, "Going to create table {} using {} SQL statements", getTableName(), sqlStatements.size());
getConnectionProperties().getTxTemplate().execute(t -> {
JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
@ -73,4 +85,26 @@ public class AddTableRawSqlTask extends BaseTableTask<AddTableRawSqlTask> {
Validate.notBlank("theSql must not be null", theSql);
myDriverNeutralSqls.add(theSql);
}
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (theO == null || getClass() != theO.getClass()) return false;
AddTableRawSqlTask that = (AddTableRawSqlTask) theO;
return new EqualsBuilder()
.appendSuper(super.equals(theO))
.append(myDriverNeutralSqls, that.myDriverNeutralSqls)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.appendSuper(super.hashCode())
.append(myDriverNeutralSqls)
.toHashCode();
}
}

View File

@ -21,7 +21,10 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
*/
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.util.VersionEnum;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.ColumnMapRowMapper;
@ -44,7 +47,8 @@ public class ArbitrarySqlTask extends BaseTask<ArbitrarySqlTask> {
private String myExecuteOnlyIfTableExists;
private List<TableAndColumn> myConditionalOnExistenceOf = new ArrayList<>();
public ArbitrarySqlTask(String theTableName, String theDescription) {
public ArbitrarySqlTask(VersionEnum theRelease, String theVersion, String theTableName, String theDescription) {
super(theRelease.toString(), theVersion);
myTableName = theTableName;
myDescription = theDescription;
}
@ -60,12 +64,12 @@ public class ArbitrarySqlTask extends BaseTask<ArbitrarySqlTask> {
@Override
public void execute() throws SQLException {
ourLog.info("Starting: {}", myDescription);
logInfo(ourLog, "Starting: {}", myDescription);
if (StringUtils.isNotBlank(myExecuteOnlyIfTableExists)) {
Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
if (!tableNames.contains(myExecuteOnlyIfTableExists.toUpperCase())) {
ourLog.info("Table {} does not exist - No action performed", myExecuteOnlyIfTableExists);
logInfo(ourLog, "Table {} does not exist - No action performed", myExecuteOnlyIfTableExists);
return;
}
}
@ -73,7 +77,7 @@ public class ArbitrarySqlTask extends BaseTask<ArbitrarySqlTask> {
for (TableAndColumn next : myConditionalOnExistenceOf) {
JdbcUtils.ColumnType columnType = JdbcUtils.getColumnType(getConnectionProperties(), next.getTable(), next.getColumn());
if (columnType == null) {
ourLog.info("Table {} does not have column {} - No action performed", next.getTable(), next.getColumn());
logInfo(ourLog, "Table {} does not have column {} - No action performed", next.getTable(), next.getColumn());
return;
}
}
@ -116,6 +120,7 @@ public class ArbitrarySqlTask extends BaseTask<ArbitrarySqlTask> {
mySql = theSql;
myMode = theMode;
myConsumer = theConsumer;
setDescription("Execute raw sql");
}
@ -127,14 +132,14 @@ public class ArbitrarySqlTask extends BaseTask<ArbitrarySqlTask> {
List<Map<String, Object>> rows;
do {
ourLog.info("Querying for up to {} rows", myBatchSize);
logInfo(ourLog, "Querying for up to {} rows", myBatchSize);
rows = getTxTemplate().execute(t -> {
JdbcTemplate jdbcTemplate = newJdbcTemnplate();
jdbcTemplate.setMaxRows(myBatchSize);
return jdbcTemplate.query(mySql, new ColumnMapRowMapper());
});
ourLog.info("Processing {} rows", rows.size());
logInfo(ourLog, "Processing {} rows", rows.size());
List<Map<String, Object>> finalRows = rows;
getTxTemplate().execute(t -> {
for (Map<String, Object> nextRow : finalRows) {
@ -163,4 +168,26 @@ public class ArbitrarySqlTask extends BaseTask<ArbitrarySqlTask> {
return myColumn;
}
}
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (!(theO instanceof ArbitrarySqlTask)) return false;
ArbitrarySqlTask that = (ArbitrarySqlTask) theO;
return new EqualsBuilder()
.append(myTableName, that.myTableName)
.append(myExecuteOnlyIfTableExists, that.myExecuteOnlyIfTableExists)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(myTableName)
.append(myExecuteOnlyIfTableExists)
.toHashCode();
}
}

View File

@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
*/
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.thymeleaf.util.StringUtils;
import java.util.Locale;
@ -29,6 +31,10 @@ public abstract class BaseTableColumnTask<T extends BaseTableTask> extends BaseT
private String myColumnName;
public BaseTableColumnTask(String theProductVersion, String theSchemaVersion) {
super(theProductVersion, theSchemaVersion);
}
@SuppressWarnings("unchecked")
public T setColumnName(String theColumnName) {
myColumnName = StringUtils.toUpperCase(theColumnName, Locale.US);
@ -46,5 +52,25 @@ public abstract class BaseTableColumnTask<T extends BaseTableTask> extends BaseT
Validate.notBlank(myColumnName, "Column name not specified");
}
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (!(theO instanceof BaseTableColumnTask)) return false;
BaseTableColumnTask<?> that = (BaseTableColumnTask<?>) theO;
return new EqualsBuilder()
.appendSuper(super.equals(theO))
.append(myColumnName, that.myColumnName)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.appendSuper(super.hashCode())
.append(myColumnName)
.toHashCode();
}
}

View File

@ -22,13 +22,19 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import org.apache.commons.lang3.Validate;
import org.springframework.util.Assert;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
public abstract class BaseTableColumnTypeTask<T extends BaseTableTask> extends BaseTableColumnTask<T> {
private static final Logger ourLog = LoggerFactory.getLogger(BaseTableColumnTypeTask.class);
private ColumnTypeEnum myColumnType;
private Map<ColumnTypeEnum, Map<DriverTypeEnum, String>> myColumnTypeToDriverTypeToSqlType = new HashMap<>();
@ -38,7 +44,9 @@ public abstract class BaseTableColumnTypeTask<T extends BaseTableTask> extends B
/**
* Constructor
*/
BaseTableColumnTypeTask() {
public BaseTableColumnTypeTask(String theProductVersion, String theSchemaVersion) {
super(theProductVersion, theSchemaVersion);
setColumnType(ColumnTypeEnum.INT, DriverTypeEnum.H2_EMBEDDED, "integer");
setColumnType(ColumnTypeEnum.INT, DriverTypeEnum.DERBY_EMBEDDED, "integer");
setColumnType(ColumnTypeEnum.INT, DriverTypeEnum.MARIADB_10_1, "integer");
@ -137,11 +145,15 @@ public abstract class BaseTableColumnTypeTask<T extends BaseTableTask> extends B
}
protected String getSqlType() {
return getSqlType(getColumnLength());
}
protected String getSqlType(Long theColumnLength) {
String retVal = myColumnTypeToDriverTypeToSqlType.get(myColumnType).get(getDriverType());
Objects.requireNonNull(retVal);
if (myColumnType == ColumnTypeEnum.STRING) {
retVal = retVal.replace("?", Long.toString(getColumnLength()));
retVal = retVal.replace("?", Long.toString(theColumnLength));
}
return retVal;
@ -184,4 +196,37 @@ public abstract class BaseTableColumnTypeTask<T extends BaseTableTask> extends B
}
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (!(theO instanceof BaseTableColumnTypeTask)) return false;
BaseTableColumnTypeTask<?> that = (BaseTableColumnTypeTask<?>) theO;
return new EqualsBuilder()
.appendSuper(super.equals(theO))
.append(getColumnTypeName(myColumnType), getColumnTypeName(that.myColumnType))
.append(myNullable, that.myNullable)
.append(myColumnLength, that.myColumnLength)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.appendSuper(super.hashCode())
.append(getColumnTypeName(myColumnType))
.append(myNullable)
.append(myColumnLength)
.toHashCode();
}
@Nullable
private Object getColumnTypeName(ColumnTypeEnum theColumnType) {
if (theColumnType == null) {
return null;
}
return myColumnType.name();
}
}

View File

@ -21,10 +21,16 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
*/
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
public abstract class BaseTableTask<T extends BaseTableTask> extends BaseTask {
private String myTableName;
public BaseTableTask(String theProductVersion, String theSchemaVersion) {
super(theProductVersion, theSchemaVersion);
}
public String getTableName() {
return myTableName;
}
@ -39,4 +45,24 @@ public abstract class BaseTableTask<T extends BaseTableTask> extends BaseTask {
public void validate() {
Validate.notBlank(myTableName);
}
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (!(theO instanceof BaseTableTask)) return false;
BaseTableTask<?> that = (BaseTableTask<?>) theO;
return new EqualsBuilder()
.append(myTableName, that.myTableName)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(myTableName)
.toHashCode();
}
}

View File

@ -32,10 +32,14 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public abstract class BaseTask<T extends BaseTask> {
private static final Logger ourLog = LoggerFactory.getLogger(BaseTask.class);
public static final String MIGRATION_VERSION_PATTERN = "\\d{8}\\.\\d+";
private static final Pattern versionPattern = Pattern.compile(MIGRATION_VERSION_PATTERN);
private DriverTypeEnum.ConnectionProperties myConnectionProperties;
private DriverTypeEnum myDriverType;
private String myDescription;
@ -43,6 +47,13 @@ public abstract class BaseTask<T extends BaseTask> {
private boolean myDryRun;
private List<ExecutedStatement> myExecutedStatements = new ArrayList<>();
private boolean myNoColumnShrink;
private final String myProductVersion;
private final String mySchemaVersion;
protected BaseTask(String theProductVersion, String theSchemaVersion) {
myProductVersion = theProductVersion;
mySchemaVersion = theSchemaVersion;
}
public boolean isNoColumnShrink() {
return myNoColumnShrink;
@ -61,6 +72,9 @@ public abstract class BaseTask<T extends BaseTask> {
}
public String getDescription() {
if (myDescription == null) {
return this.getClass().getSimpleName();
}
return myDescription;
}
@ -88,7 +102,7 @@ public abstract class BaseTask<T extends BaseTask> {
Integer changes = getConnectionProperties().getTxTemplate().execute(t -> {
JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
int changesCount = jdbcTemplate.update(theSql, theArguments);
ourLog.info("SQL \"{}\" returned {}", theSql, changesCount);
logInfo(ourLog, "SQL \"{}\" returned {}", theSql, changesCount);
return changesCount;
});
@ -130,6 +144,25 @@ public abstract class BaseTask<T extends BaseTask> {
public abstract void execute() throws SQLException;
public String getFlywayVersion() {
String releasePart = myProductVersion;
if (releasePart.startsWith("V")) {
releasePart = releasePart.substring(1);
}
return releasePart + "." + mySchemaVersion;
}
protected void logInfo(Logger theLog, String theFormattedMessage, Object... theArguments) {
theLog.info(getFlywayVersion() + ": " + theFormattedMessage, theArguments);
}
public void validateVersion() {
Matcher matcher = versionPattern.matcher(mySchemaVersion);
if (!matcher.matches()) {
throw new IllegalStateException("The version " + mySchemaVersion + " does not match the expected pattern " + MIGRATION_VERSION_PATTERN);
}
}
public static class ExecutedStatement {
private final String mySql;
private final List<Object> myArguments;

View File

@ -20,7 +20,9 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
* #L%
*/
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.VersionEnum;
import com.google.common.collect.ForwardingMap;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
@ -32,10 +34,7 @@ import org.springframework.jdbc.core.RowCallbackHandler;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.concurrent.*;
import java.util.function.Function;
@ -53,8 +52,9 @@ public class CalculateHashesTask extends BaseTableColumnTask<CalculateHashesTask
/**
* Constructor
*/
public CalculateHashesTask() {
super();
public CalculateHashesTask(VersionEnum theRelease, String theVersion) {
super(theRelease.toString(), theVersion);
setDescription("Calculate resource search parameter index hashes");
}
@Override
@ -63,6 +63,14 @@ public class CalculateHashesTask extends BaseTableColumnTask<CalculateHashesTask
return;
}
Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
// This table was added shortly after hash indexes were added, so it is a reasonable indicator for whether this
// migration has already been run
if (tableNames.contains("HFJ_RES_REINDEX_JOB")) {
logInfo(ourLog, "The table HFJ_RES_REINDEX_JOB already exists. Skipping calculate hashes task.");
return;
}
initializeExecutor();
try {
@ -72,7 +80,7 @@ public class CalculateHashesTask extends BaseTableColumnTask<CalculateHashesTask
JdbcTemplate jdbcTemplate = newJdbcTemnplate();
jdbcTemplate.setMaxRows(100000);
String sql = "SELECT * FROM " + getTableName() + " WHERE " + getColumnName() + " IS NULL";
ourLog.info("Finding up to {} rows in {} that requires hashes", myBatchSize, getTableName());
logInfo(ourLog, "Finding up to {} rows in {} that requires hashes", myBatchSize, getTableName());
jdbcTemplate.query(sql, rch);
rch.done();
@ -86,7 +94,7 @@ public class CalculateHashesTask extends BaseTableColumnTask<CalculateHashesTask
break;
}
ourLog.info("Waiting for {} tasks to complete", futures.size());
logInfo(ourLog, "Waiting for {} tasks to complete", futures.size());
for (Future<?> next : futures) {
try {
next.get();
@ -118,7 +126,7 @@ public class CalculateHashesTask extends BaseTableColumnTask<CalculateHashesTask
RejectedExecutionHandler rejectedExecutionHandler = new RejectedExecutionHandler() {
@Override
public void rejectedExecution(Runnable theRunnable, ThreadPoolExecutor theExecutor) {
ourLog.info("Note: Executor queue is full ({} elements), waiting for a slot to become available!", executorQueue.size());
logInfo(ourLog, "Note: Executor queue is full ({} elements), waiting for a slot to become available!", executorQueue.size());
StopWatch sw = new StopWatch();
try {
executorQueue.put(theRunnable);
@ -126,7 +134,7 @@ public class CalculateHashesTask extends BaseTableColumnTask<CalculateHashesTask
throw new RejectedExecutionException("Task " + theRunnable.toString() +
" rejected from " + theE.toString());
}
ourLog.info("Slot become available after {}ms", sw.getMillis());
logInfo(ourLog, "Slot become available after {}ms", sw.getMillis());
}
};
myExecutor = new ThreadPoolExecutor(
@ -182,7 +190,7 @@ public class CalculateHashesTask extends BaseTableColumnTask<CalculateHashesTask
return theRows.size();
});
ourLog.info("Updated {} rows on {} in {}", theRows.size(), getTableName(), sw.toString());
logInfo(ourLog, "Updated {} rows on {} in {}", theRows.size(), getTableName(), sw.toString());
};
return myExecutor.submit(task);
}

View File

@ -32,19 +32,28 @@ public class DropColumnTask extends BaseTableColumnTask<DropColumnTask> {
private static final Logger ourLog = LoggerFactory.getLogger(DropColumnTask.class);
public DropColumnTask(String theProductVersion, String theSchemaVersion) {
super(theProductVersion, theSchemaVersion);
}
@Override
public void validate() {
super.validate();
setDescription("Drop column " + getColumnName() + " from table " + getTableName());
}
@Override
public void execute() throws SQLException {
Set<String> columnNames = JdbcUtils.getColumnNames(getConnectionProperties(), getTableName());
if (!columnNames.contains(getColumnName())) {
ourLog.info("Column {} does not exist on table {} - No action performed", getColumnName(), getTableName());
logInfo(ourLog, "Column {} does not exist on table {} - No action performed", getColumnName(), getTableName());
return;
}
String tableName = getTableName();
String columnName = getColumnName();
String sql = createSql(tableName, columnName);
ourLog.info("Dropping column {} on table {}", getColumnName(), getTableName());
logInfo(ourLog, "Dropping column {} on table {}", getColumnName(), getTableName());
executeSql(getTableName(), sql);
}
@ -53,4 +62,5 @@ public class DropColumnTask extends BaseTableColumnTask<DropColumnTask> {
return "alter table " + theTableName + " drop column " + theColumnName;
}
}

View File

@ -23,6 +23,8 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -40,6 +42,10 @@ public class DropForeignKeyTask extends BaseTableTask<DropForeignKeyTask> {
private String myConstraintName;
private String myParentTableName;
public DropForeignKeyTask(String theProductVersion, String theSchemaVersion) {
super(theProductVersion, theSchemaVersion);
}
public void setConstraintName(String theConstraintName) {
myConstraintName = theConstraintName;
}
@ -54,6 +60,8 @@ public class DropForeignKeyTask extends BaseTableTask<DropForeignKeyTask> {
Validate.isTrue(isNotBlank(myConstraintName));
Validate.isTrue(isNotBlank(myParentTableName));
setDescription("Drop foreign key " + myConstraintName + " from table " + getTableName());
}
@Override
@ -61,7 +69,7 @@ public class DropForeignKeyTask extends BaseTableTask<DropForeignKeyTask> {
Set<String> existing = JdbcUtils.getForeignKeys(getConnectionProperties(), myParentTableName, getTableName());
if (!existing.contains(myConstraintName)) {
ourLog.info("Don't have constraint named {} - No action performed", myConstraintName);
logInfo(ourLog, "Don't have constraint named {} - No action performed", myConstraintName);
return;
}
@ -96,4 +104,27 @@ public class DropForeignKeyTask extends BaseTableTask<DropForeignKeyTask> {
return sqls;
}
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (theO == null || getClass() != theO.getClass()) return false;
DropForeignKeyTask that = (DropForeignKeyTask) theO;
return new EqualsBuilder()
.appendSuper(super.equals(theO))
.append(myConstraintName, that.myConstraintName)
.append(myParentTableName, that.myParentTableName)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.appendSuper(super.hashCode())
.append(myConstraintName)
.append(myParentTableName)
.toHashCode();
}
}

View File

@ -22,6 +22,8 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -36,13 +38,15 @@ public class DropIdGeneratorTask extends BaseTask<DropIdGeneratorTask> {
private static final Logger ourLog = LoggerFactory.getLogger(DropIdGeneratorTask.class);
private final String myGeneratorName;
public DropIdGeneratorTask(String theGeneratorName) {
public DropIdGeneratorTask(String theProductVersion, String theSchemaVersion, String theGeneratorName) {
super(theProductVersion, theSchemaVersion);
myGeneratorName = theGeneratorName;
}
@Override
public void validate() {
Validate.notBlank(myGeneratorName);
setDescription("Drop id generator " + myGeneratorName);
}
@Override
@ -89,7 +93,7 @@ public class DropIdGeneratorTask extends BaseTask<DropIdGeneratorTask> {
.collect(Collectors.toSet());
ourLog.debug("Currently have sequences: {}", sequenceNames);
if (!sequenceNames.contains(myGeneratorName.toLowerCase())) {
ourLog.info("Sequence {} does not exist - No action performed", myGeneratorName);
logInfo(ourLog, "Sequence {} does not exist - No action performed", myGeneratorName);
return;
}
@ -98,4 +102,23 @@ public class DropIdGeneratorTask extends BaseTask<DropIdGeneratorTask> {
}
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (!(theO instanceof DropIdGeneratorTask)) return false;
DropIdGeneratorTask that = (DropIdGeneratorTask) theO;
return new EqualsBuilder()
.append(myGeneratorName, that.myGeneratorName)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(myGeneratorName)
.toHashCode();
}
}

View File

@ -23,26 +23,33 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.SQLException;
import java.util.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
public class DropIndexTask extends BaseTableTask<DropIndexTask> {
private static final Logger ourLog = LoggerFactory.getLogger(DropIndexTask.class);
private String myIndexName;
public DropIndexTask(String theProductVersion, String theSchemaVersion) {
super(theProductVersion, theSchemaVersion);
}
@Override
public void validate() {
super.validate();
Validate.notBlank(myIndexName, "The index name must not be blank");
if (getDescription() == null) {
setDescription("Drop index " + myIndexName + " on table " + getTableName());
}
setDescription("Drop index " + myIndexName + " from table " + getTableName());
}
@Override
@ -50,7 +57,7 @@ public class DropIndexTask extends BaseTableTask<DropIndexTask> {
Set<String> indexNames = JdbcUtils.getIndexNames(getConnectionProperties(), getTableName());
if (!indexNames.contains(myIndexName)) {
ourLog.info("Index {} does not exist on table {} - No action needed", myIndexName, getTableName());
logInfo(ourLog, "Index {} does not exist on table {} - No action needed", myIndexName, getTableName());
return;
}
@ -59,7 +66,7 @@ public class DropIndexTask extends BaseTableTask<DropIndexTask> {
List<String> sqls = createDropIndexSql(getConnectionProperties(), getTableName(), myIndexName, getDriverType());
if (!sqls.isEmpty()) {
ourLog.info("Dropping {} index {} on table {}", uniquenessString, myIndexName, getTableName());
logInfo(ourLog, "Dropping {} index {} on table {}", uniquenessString, myIndexName, getTableName());
}
for (@Language("SQL") String sql : sqls) {
executeSql(getTableName(), sql);
@ -123,4 +130,26 @@ public class DropIndexTask extends BaseTableTask<DropIndexTask> {
}
return sql;
}
// Equality combines the superclass state (table name etc. via appendSuper)
// with this task's index name. Uses getClass() rather than instanceof, so
// subclasses never compare equal to a DropIndexTask.
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (theO == null || getClass() != theO.getClass()) return false;
DropIndexTask that = (DropIndexTask) theO;
return new EqualsBuilder()
.appendSuper(super.equals(theO))
.append(myIndexName, that.myIndexName)
.isEquals();
}
// Kept consistent with equals(): superclass hash plus the index name.
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.appendSuper(super.hashCode())
.append(myIndexName)
.toHashCode();
}
}

View File

@ -27,13 +27,22 @@ import org.slf4j.LoggerFactory;
import java.sql.SQLException;
import java.util.List;
import java.util.Optional;
import java.util.Set;
public class DropTableTask extends BaseTableTask<DropTableTask> {
private static final Logger ourLog = LoggerFactory.getLogger(DropTableTask.class);
public DropTableTask(String theProductVersion, String theSchemaVersion) {
super(theProductVersion, theSchemaVersion);
}
@Override
public void validate() {
// Superclass validates the required table name before we describe the task.
super.validate();
// Human-readable description used in migration logs.
setDescription("Drop table " + getTableName());
}
@Override
public void execute() throws SQLException {
Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
@ -42,10 +51,10 @@ public class DropTableTask extends BaseTableTask<DropTableTask> {
}
Set<String> foreignKeys = JdbcUtils.getForeignKeys(getConnectionProperties(), null, getTableName());
ourLog.info("Table {} has the following foreign keys: {}", getTableName(), foreignKeys);
logInfo(ourLog, "Table {} has the following foreign keys: {}", getTableName(), foreignKeys);
Set<String> indexNames = JdbcUtils.getIndexNames(getConnectionProperties(), getTableName());
ourLog.info("Table {} has the following indexes: {}", getTableName(), indexNames);
logInfo(ourLog, "Table {} has the following indexes: {}", getTableName(), indexNames);
for (String next : foreignKeys) {
List<String> sql = DropForeignKeyTask.generateSql(getTableName(), next, getDriverType());
@ -57,14 +66,14 @@ public class DropTableTask extends BaseTableTask<DropTableTask> {
for (String nextIndex : indexNames) {
List<String> sqls = DropIndexTask.createDropIndexSql(getConnectionProperties(), getTableName(), nextIndex, getDriverType());
if (!sqls.isEmpty()) {
ourLog.info("Dropping index {} on table {} in preparation for table delete", nextIndex, getTableName());
logInfo(ourLog, "Dropping index {} on table {} in preparation for table delete", nextIndex, getTableName());
}
for (@Language("SQL") String sql : sqls) {
executeSql(getTableName(), sql);
}
}
ourLog.info("Dropping table: {}", getTableName());
logInfo(ourLog, "Dropping table: {}", getTableName());
@Language("SQL")
String sql = "DROP TABLE " + getTableName();

View File

@ -22,11 +22,16 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class ExecuteRawSqlTask extends BaseTask<ExecuteRawSqlTask> {
@ -34,6 +39,11 @@ public class ExecuteRawSqlTask extends BaseTask<ExecuteRawSqlTask> {
private Map<DriverTypeEnum, List<String>> myDriverToSqls = new HashMap<>();
private List<String> myDriverNeutralSqls = new ArrayList<>();
public ExecuteRawSqlTask(String theProductVersion, String theSchemaVersion) {
super(theProductVersion, theSchemaVersion);
setDescription("Execute raw sql");
}
public ExecuteRawSqlTask addSql(DriverTypeEnum theDriverType, @Language("SQL") String theSql) {
Validate.notNull(theDriverType);
Validate.notBlank(theSql);
@ -61,11 +71,31 @@ public class ExecuteRawSqlTask extends BaseTask<ExecuteRawSqlTask> {
List<String> sqlStatements = myDriverToSqls.computeIfAbsent(getDriverType(), t -> new ArrayList<>());
sqlStatements.addAll(myDriverNeutralSqls);
ourLog.info("Going to execute {} SQL statements", sqlStatements.size());
logInfo(ourLog, "Going to execute {} SQL statements", sqlStatements.size());
for (String nextSql : sqlStatements) {
executeSql(null, nextSql);
}
}
// Equality is based only on the driver-neutral SQL list.
// NOTE(review): myDriverToSqls (driver-specific statements) is not part of
// equality or hashing - confirm this omission is intentional.
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (!(theO instanceof ExecuteRawSqlTask)) return false;
ExecuteRawSqlTask that = (ExecuteRawSqlTask) theO;
return new EqualsBuilder()
.append(myDriverNeutralSqls, that.myDriverNeutralSqls)
.isEquals();
}
// Kept consistent with equals(): only the driver-neutral SQL contributes.
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(myDriverNeutralSqls)
.toHashCode();
}
}

View File

@ -0,0 +1,89 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
/*-
* #%L
* HAPI FHIR JPA Server - Migration
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.tasks.api.ISchemaInitializationProvider;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.SQLException;
import java.util.List;
import java.util.Set;
/**
 * Migration task that bootstraps an empty database by executing the DDL
 * statements supplied by an {@link ISchemaInitializationProvider}. The task
 * is a no-op when the provider's indicator table already exists.
 */
public class InitializeSchemaTask extends BaseTask<InitializeSchemaTask> {
	private static final Logger ourLog = LoggerFactory.getLogger(InitializeSchemaTask.class);

	private final ISchemaInitializationProvider mySchemaInitializationProvider;

	/**
	 * @param theProductVersion               product version recorded by the base task
	 * @param theSchemaVersion                schema version recorded by the base task
	 * @param theSchemaInitializationProvider source of the per-driver DDL statements
	 */
	public InitializeSchemaTask(String theProductVersion, String theSchemaVersion, ISchemaInitializationProvider theSchemaInitializationProvider) {
		super(theProductVersion, theSchemaVersion);
		mySchemaInitializationProvider = theSchemaInitializationProvider;
		setDescription("Initialize schema");
	}

	@Override
	public void validate() {
		// No preconditions to check for this task
	}

	@Override
	public void execute() throws SQLException {
		DriverTypeEnum theDriver = getDriverType();
		String indicatorTable = mySchemaInitializationProvider.getSchemaExistsIndicatorTable();
		Set<String> existingTables = JdbcUtils.getTableNames(getConnectionProperties());

		// If the indicator table is present the schema was initialized before.
		if (existingTables.contains(indicatorTable)) {
			logInfo(ourLog, "The table {} already exists. Skipping schema initialization for {}", indicatorTable, theDriver);
			return;
		}

		logInfo(ourLog, "Initializing schema for {}", theDriver);
		for (String statement : mySchemaInitializationProvider.getSqlStatements(theDriver)) {
			executeSql(null, statement);
		}
	}

	// Equality is delegated to the provider; tasks with equal providers are equal.
	@Override
	public boolean equals(Object theO) {
		if (this == theO) {
			return true;
		}
		if (theO == null || getClass() != theO.getClass()) {
			return false;
		}
		InitializeSchemaTask otherTask = (InitializeSchemaTask) theO;
		return new EqualsBuilder()
			.append(mySchemaInitializationProvider, otherTask.mySchemaInitializationProvider)
			.isEquals();
	}

	@Override
	public int hashCode() {
		return new HashCodeBuilder(17, 37)
			.append(mySchemaInitializationProvider)
			.toHashCode();
	}
}

View File

@ -1,47 +0,0 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
/*-
* #%L
* HAPI FHIR JPA Server - Migration
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Migration task that performs no database work; it simply logs the supplied
 * message framed above and below by a line of asterisks, marking the start
 * of a new section in the migration log.
 */
public class LogStartSectionWithMessageTask extends BaseTask {
	private static final Logger ourLog = LoggerFactory.getLogger(LogStartSectionWithMessageTask.class);

	private final String myMessage;

	/**
	 * @param theMessage the section header text to log
	 */
	public LogStartSectionWithMessageTask(String theMessage) {
		myMessage = theMessage;
	}

	@Override
	public void validate() {
		// Nothing to validate - any message is acceptable
	}

	@Override
	public void execute() {
		// Banner of '*' characters exactly as wide as the message itself
		String banner = StringUtils.leftPad("", myMessage.length(), "*");
		ourLog.info("");
		ourLog.info(banner);
		ourLog.info(myMessage);
		ourLog.info(banner);
	}
}

View File

@ -32,6 +32,15 @@ public class ModifyColumnTask extends BaseTableColumnTypeTask<ModifyColumnTask>
private static final Logger ourLog = LoggerFactory.getLogger(ModifyColumnTask.class);
public ModifyColumnTask(String theProductVersion, String theSchemaVersion) {
super(theProductVersion, theSchemaVersion);
}
@Override
public void validate() {
super.validate();
setDescription("Modify column " + getColumnName() + " on table " + getTableName());
}
@Override
public void execute() throws SQLException {
@ -41,7 +50,7 @@ public class ModifyColumnTask extends BaseTableColumnTypeTask<ModifyColumnTask>
Set<String> columnNames = JdbcUtils.getColumnNames(getConnectionProperties(), getTableName());
if (!columnNames.contains(getColumnName())) {
ourLog.info("Column {} doesn't exist on table {} - No action performed", getColumnName(), getTableName());
logInfo(ourLog, "Column {} doesn't exist on table {} - No action performed", getColumnName(), getTableName());
return;
}
@ -52,21 +61,22 @@ public class ModifyColumnTask extends BaseTableColumnTypeTask<ModifyColumnTask>
throw new InternalErrorException(e);
}
if (getColumnLength() != null && isNoColumnShrink()) {
Long columnLength = getColumnLength();
if (columnLength != null && isNoColumnShrink()) {
long existingLength = existingType.getLength() != null ? existingType.getLength() : 0;
if (existingLength > getColumnLength()) {
setColumnLength(existingLength);
if (existingLength > columnLength) {
columnLength = existingLength;
}
}
boolean alreadyOfCorrectType = existingType.equals(getColumnType(), getColumnLength());
boolean alreadyOfCorrectType = existingType.equals(getColumnType(), columnLength);
boolean alreadyCorrectNullable = isNullable() == nullable;
if (alreadyOfCorrectType && alreadyCorrectNullable) {
ourLog.info("Column {} on table {} is already of type {} and has nullable {} - No action performed", getColumnName(), getTableName(), existingType, nullable);
logInfo(ourLog, "Column {} on table {} is already of type {} and has nullable {} - No action performed", getColumnName(), getTableName(), existingType, nullable);
return;
}
String type = getSqlType();
String type = getSqlType(columnLength);
String notNull = getSqlNotNull();
String sql = null;
@ -119,13 +129,13 @@ public class ModifyColumnTask extends BaseTableColumnTypeTask<ModifyColumnTask>
throw new IllegalStateException("Dont know how to handle " + getDriverType());
}
ourLog.info("Updating column {} on table {} to type {}", getColumnName(), getTableName(), type);
logInfo(ourLog, "Updating column {} on table {} to type {}", getColumnName(), getTableName(), type);
if (sql != null) {
executeSql(getTableName(), sql);
}
if (sqlNotNull != null) {
ourLog.info("Updating column {} on table {} to not null", getColumnName(), getTableName());
logInfo(ourLog, "Updating column {} on table {} to not null", getColumnName(), getTableName());
executeSql(getTableName(), sqlNotNull);
}
}

View File

@ -22,6 +22,8 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.ColumnMapRowMapper;
@ -35,13 +37,23 @@ public class RenameColumnTask extends BaseTableTask<RenameColumnTask> {
private static final Logger ourLog = LoggerFactory.getLogger(RenameColumnTask.class);
private String myOldName;
private String myNewName;
private boolean myAllowNeitherColumnToExist;
private boolean myIsOkayIfNeitherColumnExists;
private boolean myDeleteTargetColumnFirstIfBothExist;
public RenameColumnTask(String theProductVersion, String theSchemaVersion) {
super(theProductVersion, theSchemaVersion);
}
public void setDeleteTargetColumnFirstIfBothExist(boolean theDeleteTargetColumnFirstIfBothExist) {
myDeleteTargetColumnFirstIfBothExist = theDeleteTargetColumnFirstIfBothExist;
}
@Override
public void validate() {
super.validate();
setDescription("Rename column " + myOldName + " to " + myNewName + " on table " + getTableName());
}
public void setOldName(String theOldName) {
Validate.notBlank(theOldName);
myOldName = theOldName;
@ -70,19 +82,19 @@ public class RenameColumnTask extends BaseTableTask<RenameColumnTask> {
throw new SQLException("Can not rename " + getTableName() + "." + myOldName + " to " + myNewName + " because both columns exist and data exists in " + myNewName);
}
ourLog.info("Table {} has columns {} and {} - Going to drop {} before renaming", getTableName(), myOldName, myNewName, myNewName);
logInfo(ourLog, "Table {} has columns {} and {} - Going to drop {} before renaming", getTableName(), myOldName, myNewName, myNewName);
String sql = DropColumnTask.createSql(getTableName(), myNewName);
executeSql(getTableName(), sql);
} else {
throw new SQLException("Can not rename " + getTableName() + "." + myOldName + " to " + myNewName + " because both columns exist!");
}
} else if (!haveOldName && !haveNewName) {
if (isAllowNeitherColumnToExist()) {
if (isOkayIfNeitherColumnExists()) {
return;
}
throw new SQLException("Can not rename " + getTableName() + "." + myOldName + " to " + myNewName + " because neither column exists!");
} else if (haveNewName) {
ourLog.info("Column {} already exists on table {} - No action performed", myNewName, getTableName());
logInfo(ourLog, "Column {} already exists on table {} - No action performed", myNewName, getTableName());
return;
}
@ -111,16 +123,44 @@ public class RenameColumnTask extends BaseTableTask<RenameColumnTask> {
throw new IllegalStateException();
}
ourLog.info("Renaming column {} on table {} to {}", myOldName, getTableName(), myNewName);
logInfo(ourLog, "Renaming column {} on table {} to {}", myOldName, getTableName(), myNewName);
executeSql(getTableName(), sql);
}
public boolean isAllowNeitherColumnToExist() {
return myAllowNeitherColumnToExist;
public boolean isOkayIfNeitherColumnExists() {
return myIsOkayIfNeitherColumnExists;
}
public void setAllowNeitherColumnToExist(boolean theAllowNeitherColumnToExist) {
myAllowNeitherColumnToExist = theAllowNeitherColumnToExist;
public void setOkayIfNeitherColumnExists(boolean theOkayIfNeitherColumnExists) {
myIsOkayIfNeitherColumnExists = theOkayIfNeitherColumnExists;
}
// Equality combines the superclass state (via appendSuper) with both column
// names and the two behavior flags. Uses getClass() rather than instanceof,
// so subclasses never compare equal to a RenameColumnTask.
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (theO == null || getClass() != theO.getClass()) return false;
RenameColumnTask that = (RenameColumnTask) theO;
return new EqualsBuilder()
.appendSuper(super.equals(theO))
.append(myIsOkayIfNeitherColumnExists, that.myIsOkayIfNeitherColumnExists)
.append(myDeleteTargetColumnFirstIfBothExist, that.myDeleteTargetColumnFirstIfBothExist)
.append(myOldName, that.myOldName)
.append(myNewName, that.myNewName)
.isEquals();
}
// Kept consistent with equals(): same fields contribute to the hash.
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.appendSuper(super.hashCode())
.append(myOldName)
.append(myNewName)
.append(myIsOkayIfNeitherColumnExists)
.append(myDeleteTargetColumnFirstIfBothExist)
.toHashCode();
}
}

View File

@ -26,6 +26,7 @@ import ca.uhn.fhir.jpa.migrate.taskdef.ArbitrarySqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTableColumnTypeTask;
import ca.uhn.fhir.jpa.migrate.taskdef.CalculateHashesTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
import ca.uhn.fhir.jpa.model.entity.*;
import ca.uhn.fhir.util.VersionEnum;
@ -49,32 +50,32 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
.map(FlagEnum::fromCommandLineValue)
.collect(Collectors.toSet());
init330();
init340();
init350();
init360();
init400();
init410();
init330(); // 20180114 - 20180329
init340(); // 20180401 - 20180528
init350(); // 20180601 - 20180917
init360(); // 20180918 - 20181112
init400(); // 20190401 - 20190814
init410(); // 20190815 - present
}
protected void init410() {
protected void init410() { // 20190815 - present
Builder version = forVersion(VersionEnum.V4_1_0);
version.onTable("HFJ_SPIDX_NUMBER").modifyColumn("RES_ID").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("HFJ_SPIDX_COORDS").modifyColumn("RES_ID").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("HFJ_SPIDX_TOKEN").modifyColumn("RES_ID").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("HFJ_SPIDX_STRING").modifyColumn("RES_ID").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("HFJ_SPIDX_DATE").modifyColumn("RES_ID").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("HFJ_SPIDX_QUANTITY").modifyColumn("RES_ID").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("HFJ_SPIDX_URI").modifyColumn("RES_ID").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("HFJ_SPIDX_NUMBER").modifyColumn("20190920.1", "RES_ID").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("HFJ_SPIDX_COORDS").modifyColumn("20190920.2", "RES_ID").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("HFJ_SPIDX_TOKEN").modifyColumn("20190920.3", "RES_ID").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("HFJ_SPIDX_STRING").modifyColumn("20190920.4", "RES_ID").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("HFJ_SPIDX_DATE").modifyColumn("20190920.5", "RES_ID").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("HFJ_SPIDX_QUANTITY").modifyColumn("20190920.6", "RES_ID").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("HFJ_SPIDX_URI").modifyColumn("20190920.7", "RES_ID").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
// HFJ_SEARCH
version.onTable("HFJ_SEARCH").addColumn("EXPIRY_OR_NULL").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.DATE_TIMESTAMP);
version.onTable("HFJ_SEARCH").addColumn("NUM_BLOCKED").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.INT);
version.onTable("HFJ_SEARCH").addColumn("20190921.1", "EXPIRY_OR_NULL").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.DATE_TIMESTAMP);
version.onTable("HFJ_SEARCH").addColumn("20190921.2", "NUM_BLOCKED").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.INT);
// HFJ_BLK_EXPORT_JOB
version.addIdGenerator("SEQ_BLKEXJOB_PID");
Builder.BuilderAddTableByColumns bulkExportJob = version.addTableByColumns("HFJ_BLK_EXPORT_JOB", "PID");
version.addIdGenerator("20190921.3", "SEQ_BLKEXJOB_PID");
Builder.BuilderAddTableByColumns bulkExportJob = version.addTableByColumns("20190921.4", "HFJ_BLK_EXPORT_JOB", "PID");
bulkExportJob.addColumn("PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
bulkExportJob.addColumn("JOB_ID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 36);
bulkExportJob.addColumn("JOB_STATUS").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 10);
@ -85,103 +86,103 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
bulkExportJob.addColumn("OPTLOCK").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.INT);
bulkExportJob.addColumn("EXP_SINCE").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.DATE_TIMESTAMP);
bulkExportJob.addColumn("STATUS_MESSAGE").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 500);
bulkExportJob.addIndex("IDX_BLKEX_EXPTIME").unique(false).withColumns("EXP_TIME");
bulkExportJob.addIndex("IDX_BLKEX_JOB_ID").unique(true).withColumns("JOB_ID");
bulkExportJob.addIndex("20190921.5", "IDX_BLKEX_EXPTIME").unique(false).withColumns("EXP_TIME");
bulkExportJob.addIndex("20190921.6", "IDX_BLKEX_JOB_ID").unique(true).withColumns("JOB_ID");
// HFJ_BLK_EXPORT_COLLECTION
version.addIdGenerator("SEQ_BLKEXCOL_PID");
Builder.BuilderAddTableByColumns bulkExportCollection = version.addTableByColumns("HFJ_BLK_EXPORT_COLLECTION", "PID");
version.addIdGenerator("20190921.7", "SEQ_BLKEXCOL_PID");
Builder.BuilderAddTableByColumns bulkExportCollection = version.addTableByColumns("20190921.8", "HFJ_BLK_EXPORT_COLLECTION", "PID");
bulkExportCollection.addColumn("PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
bulkExportCollection.addColumn("JOB_PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
bulkExportCollection.addForeignKey("FK_BLKEXCOL_JOB").toColumn("JOB_PID").references("HFJ_BLK_EXPORT_JOB", "PID");
bulkExportCollection.addForeignKey("20190921.9", "FK_BLKEXCOL_JOB").toColumn("JOB_PID").references("HFJ_BLK_EXPORT_JOB", "PID");
bulkExportCollection.addColumn("RES_TYPE").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
bulkExportCollection.addColumn("TYPE_FILTER").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 1000);
bulkExportCollection.addColumn("OPTLOCK").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.INT);
// HFJ_BLK_EXPORT_COLFILE
version.addIdGenerator("SEQ_BLKEXCOLFILE_PID");
Builder.BuilderAddTableByColumns bulkExportCollectionFile = version.addTableByColumns("HFJ_BLK_EXPORT_COLFILE", "PID");
version.addIdGenerator("20190921.10", "SEQ_BLKEXCOLFILE_PID");
Builder.BuilderAddTableByColumns bulkExportCollectionFile = version.addTableByColumns("20190921.11", "HFJ_BLK_EXPORT_COLFILE", "PID");
bulkExportCollectionFile.addColumn("PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
bulkExportCollectionFile.addColumn("COLLECTION_PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
bulkExportCollectionFile.addColumn("RES_ID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
bulkExportCollectionFile.addForeignKey("FK_BLKEXCOLFILE_COLLECT").toColumn("COLLECTION_PID").references("HFJ_BLK_EXPORT_COLLECTION", "PID");
bulkExportCollectionFile.addForeignKey("20190921.12", "FK_BLKEXCOLFILE_COLLECT").toColumn("COLLECTION_PID").references("HFJ_BLK_EXPORT_COLLECTION", "PID");
// HFJ_RES_VER_PROV
version.startSectionWithMessage("Processing bulkExportCollectionFile: HFJ_RES_VER_PROV");
Builder.BuilderAddTableByColumns resVerProv = version.addTableByColumns("HFJ_RES_VER_PROV", "RES_VER_PID");
Builder.BuilderAddTableByColumns resVerProv = version.addTableByColumns("20190921.13", "HFJ_RES_VER_PROV", "RES_VER_PID");
resVerProv.addColumn("RES_VER_PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
resVerProv
.addForeignKey("FK_RESVERPROV_RESVER_PID")
.addForeignKey("20190921.14", "FK_RESVERPROV_RESVER_PID")
.toColumn("RES_VER_PID")
.references("HFJ_RES_VER", "PID");
resVerProv.addColumn("RES_PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
resVerProv
.addForeignKey("FK_RESVERPROV_RES_PID")
.addForeignKey("20190921.15", "FK_RESVERPROV_RES_PID")
.toColumn("RES_PID")
.references("HFJ_RESOURCE", "RES_ID");
resVerProv.addColumn("SOURCE_URI").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
resVerProv.addColumn("REQUEST_ID").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 16);
resVerProv.addIndex("IDX_RESVERPROV_SOURCEURI").unique(false).withColumns("SOURCE_URI");
resVerProv.addIndex("IDX_RESVERPROV_REQUESTID").unique(false).withColumns("REQUEST_ID");
resVerProv.addIndex("20190921.16", "IDX_RESVERPROV_SOURCEURI").unique(false).withColumns("SOURCE_URI");
resVerProv.addIndex("20190921.17", "IDX_RESVERPROV_REQUESTID").unique(false).withColumns("REQUEST_ID");
// TermValueSetConceptDesignation
version.startSectionWithMessage("Processing bulkExportCollectionFile: TRM_VALUESET_C_DESIGNATION");
Builder.BuilderWithTableName termValueSetConceptDesignationTable = version.onTable("TRM_VALUESET_C_DESIGNATION");
termValueSetConceptDesignationTable.addColumn("VALUESET_PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
termValueSetConceptDesignationTable.addColumn("20190921.18", "VALUESET_PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
termValueSetConceptDesignationTable
.addForeignKey("FK_TRM_VSCD_VS_PID")
.addForeignKey("20190921.19", "FK_TRM_VSCD_VS_PID")
.toColumn("VALUESET_PID")
.references("TRM_VALUESET", "PID");
// Drop HFJ_SEARCH_RESULT foreign keys
version.onTable("HFJ_SEARCH_RESULT").dropForeignKey("FK_SEARCHRES_RES", "HFJ_RESOURCE");
version.onTable("HFJ_SEARCH_RESULT").dropForeignKey("FK_SEARCHRES_SEARCH", "HFJ_SEARCH");
version.onTable("HFJ_SEARCH_RESULT").dropForeignKey("20190921.20", "FK_SEARCHRES_RES", "HFJ_RESOURCE");
version.onTable("HFJ_SEARCH_RESULT").dropForeignKey("20190921.21", "FK_SEARCHRES_SEARCH", "HFJ_SEARCH");
// TermValueSet
version.startSectionWithMessage("Processing bulkExportCollectionFile: TRM_VALUESET");
Builder.BuilderWithTableName termValueSetTable = version.onTable("TRM_VALUESET");
termValueSetTable.addColumn("TOTAL_CONCEPTS").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
termValueSetTable.addColumn("TOTAL_CONCEPT_DESIGNATIONS").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
termValueSetTable.addColumn("20190921.22", "TOTAL_CONCEPTS").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
termValueSetTable.addColumn("20190921.23", "TOTAL_CONCEPT_DESIGNATIONS").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
termValueSetTable
.dropIndex("IDX_VALUESET_EXP_STATUS");
.dropIndex("20190921.24", "IDX_VALUESET_EXP_STATUS");
version.dropIdGenerator("SEQ_SEARCHPARM_ID");
version.dropIdGenerator("20190921.25", "SEQ_SEARCHPARM_ID");
// TermValueSetConcept
version.startSectionWithMessage("Processing bulkExportCollectionFile: TRM_VALUESET_CONCEPT");
Builder.BuilderWithTableName termValueSetConceptTable = version.onTable("TRM_VALUESET_CONCEPT");
termValueSetConceptTable.addColumn("VALUESET_ORDER").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.INT);
termValueSetConceptTable.addColumn("20190921.26", "VALUESET_ORDER").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.INT);
termValueSetConceptTable
.addIndex("IDX_VS_CONCEPT_ORDER")
.addIndex("20190921.27", "IDX_VS_CONCEPT_ORDER")
.unique(true)
.withColumns("VALUESET_PID", "VALUESET_ORDER");
// Account for RESTYPE_LEN column increasing from 30 to 40
version.onTable("HFJ_RESOURCE").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_RES_VER").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_HISTORY_TAG").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_RES_LINK").modifyColumn("SOURCE_RESOURCE_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_RES_LINK").modifyColumn("TARGET_RESOURCE_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_RES_TAG").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_RESOURCE").modifyColumn("20191002.1", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_RES_VER").modifyColumn("20191002.2", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_HISTORY_TAG").modifyColumn("20191002.3", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_RES_LINK").modifyColumn("20191002.4", "SOURCE_RESOURCE_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_RES_LINK").modifyColumn("20191002.5", "TARGET_RESOURCE_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_RES_TAG").modifyColumn("20191002.6", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
// TermConceptDesignation
version.startSectionWithMessage("Processing table: TRM_CONCEPT_DESIG");
version.onTable("TRM_CONCEPT_DESIG").modifyColumn("VAL").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 2000);
version.onTable("TRM_CONCEPT_DESIG").modifyColumn("20191002.7", "VAL").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 2000);
// TermValueSetConceptDesignation
version.startSectionWithMessage("Processing table: TRM_VALUESET_C_DESIGNATION");
version.onTable("TRM_VALUESET_C_DESIGNATION").modifyColumn("VAL").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 2000);
version.onTable("TRM_VALUESET_C_DESIGNATION").modifyColumn("20191002.8", "VAL").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 2000);
// TermConceptProperty
version.startSectionWithMessage("Processing table: TRM_CONCEPT_PROPERTY");
version.onTable("TRM_CONCEPT_PROPERTY").addColumn("PROP_VAL_LOB").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.BLOB);
version.onTable("TRM_CONCEPT_PROPERTY").addColumn("20191002.9", "PROP_VAL_LOB").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.BLOB);
}
// Registers schema-migration tasks for the HAPI FHIR JPA 4.0.0 release (task IDs 20190722.* - 20190814.*).
//
// NOTE(review): this region is an unresolved diff/merge rendering, not compilable Java.
// Most statements appear twice: first the pre-Flyway form (no task-ID argument), then the
// post-merge Flyway form whose FIRST argument is a "yyyyMMdd.n" migration task ID. Raw diff
// hunk headers ("@ -NNN,NN +NNN,NN @@ ...") are also embedded in the body, which means some
// context lines between hunks were elided and are missing here. To resolve: keep the
// task-ID variants, delete the legacy duplicates and hunk markers, and restore the elided
// lines from the merged upstream file. The comments below change no code.
protected void init400() {
protected void init400() { // 20190401 - 20190814
// All tasks below are recorded against the 4.0.0 schema version.
Builder version = forVersion(VersionEnum.V4_0_0);
// BinaryStorageEntity: new table backing binary blob storage.
// BinaryStorageEntity
Builder.BuilderAddTableByColumns binaryBlob = version.addTableByColumns("HFJ_BINARY_STORAGE_BLOB", "BLOB_ID");
Builder.BuilderAddTableByColumns binaryBlob = version.addTableByColumns("20190722.1", "HFJ_BINARY_STORAGE_BLOB", "BLOB_ID");
binaryBlob.addColumn("BLOB_ID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
binaryBlob.addColumn("RESOURCE_ID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
binaryBlob.addColumn("BLOB_SIZE").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.INT);
// [diff hunk header -- merge artifact, not Java; context lines before/after it are elided]
@ -191,121 +192,121 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
binaryBlob.addColumn("BLOB_HASH").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 128);
// Interim builds used this name
version.onTable("TRM_VALUESET_CODE").dropThisTable();
version.onTable("TRM_VALUESET_CODE").dropThisTable("20190722.2");
// Rename terminology concept-map columns from field-style names to SQL-style names.
version.onTable("TRM_CONCEPT_MAP_GROUP")
.renameColumn("myConceptMapUrl", "CONCEPT_MAP_URL", false, true)
.renameColumn("mySourceValueSet", "SOURCE_VS", false, true)
.renameColumn("myTargetValueSet", "TARGET_VS", false, true);
.renameColumn("20190722.3", "myConceptMapUrl", "CONCEPT_MAP_URL", false, true)
.renameColumn("20190722.4", "mySourceValueSet", "SOURCE_VS", false, true)
.renameColumn("20190722.5", "myTargetValueSet", "TARGET_VS", false, true);
version.onTable("TRM_CONCEPT_MAP_GROUP")
.modifyColumn("CONCEPT_MAP_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
.modifyColumn("20190722.6", "CONCEPT_MAP_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_CONCEPT_MAP_GROUP")
.modifyColumn("SOURCE_VERSION").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
.modifyColumn("20190722.7", "SOURCE_VERSION").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_CONCEPT_MAP_GROUP")
.modifyColumn("SOURCE_VS").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
.modifyColumn("20190722.8", "SOURCE_VS").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_CONCEPT_MAP_GROUP")
.modifyColumn("TARGET_VERSION").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
.modifyColumn("20190722.9", "TARGET_VERSION").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_CONCEPT_MAP_GROUP")
.modifyColumn("TARGET_VS").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
.modifyColumn("20190722.10", "TARGET_VS").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT")
.renameColumn("myConceptMapUrl", "CONCEPT_MAP_URL", false, true)
.renameColumn("mySystem", "SYSTEM_URL", false, true)
.renameColumn("mySystemVersion", "SYSTEM_VERSION", false, true)
.renameColumn("myValueSet", "VALUESET_URL", false, true);
.renameColumn("20190722.11", "myConceptMapUrl", "CONCEPT_MAP_URL", false, true)
.renameColumn("20190722.12", "mySystem", "SYSTEM_URL", false, true)
.renameColumn("20190722.13", "mySystemVersion", "SYSTEM_VERSION", false, true)
.renameColumn("20190722.14", "myValueSet", "VALUESET_URL", false, true);
version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT")
.modifyColumn("CONCEPT_MAP_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
.modifyColumn("20190722.15", "CONCEPT_MAP_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT")
.modifyColumn("SOURCE_CODE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 500);
.modifyColumn("20190722.16", "SOURCE_CODE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 500);
version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT")
.modifyColumn("SYSTEM_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
.modifyColumn("20190722.17", "SYSTEM_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT")
.modifyColumn("SYSTEM_VERSION").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
.modifyColumn("20190722.18", "SYSTEM_VERSION").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT")
.modifyColumn("VALUESET_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
.modifyColumn("20190722.19", "VALUESET_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
.renameColumn("myConceptMapUrl", "CONCEPT_MAP_URL", false, true)
.renameColumn("mySystem", "SYSTEM_URL", false, true)
.renameColumn("mySystemVersion", "SYSTEM_VERSION", false, true)
.renameColumn("myValueSet", "VALUESET_URL", false, true);
.renameColumn("20190722.20", "myConceptMapUrl", "CONCEPT_MAP_URL", false, true)
.renameColumn("20190722.21", "mySystem", "SYSTEM_URL", false, true)
.renameColumn("20190722.22", "mySystemVersion", "SYSTEM_VERSION", false, true)
.renameColumn("20190722.23", "myValueSet", "VALUESET_URL", false, true);
version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
.modifyColumn("CONCEPT_MAP_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
.modifyColumn("20190722.24", "CONCEPT_MAP_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
.modifyColumn("SYSTEM_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
.modifyColumn("20190722.25", "SYSTEM_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
.modifyColumn("SYSTEM_VERSION").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
.modifyColumn("20190722.26", "SYSTEM_VERSION").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
.modifyColumn("TARGET_CODE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 500);
.modifyColumn("20190722.27", "TARGET_CODE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 500);
version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
.modifyColumn("VALUESET_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
.modifyColumn("20190722.28", "VALUESET_URL").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_CONCEPT")
.renameColumn("CODE", "CODEVAL", false, true);
.renameColumn("20190722.29", "CODE", "CODEVAL", false, true);
// TermValueSet: new table plus sequence, unique URL index, and FK to HFJ_RESOURCE.
// TermValueSet
version.startSectionWithMessage("Processing table: TRM_VALUESET");
version.addIdGenerator("SEQ_VALUESET_PID");
Builder.BuilderAddTableByColumns termValueSetTable = version.addTableByColumns("TRM_VALUESET", "PID");
version.addIdGenerator("20190722.30", "SEQ_VALUESET_PID");
Builder.BuilderAddTableByColumns termValueSetTable = version.addTableByColumns("20190722.31", "TRM_VALUESET", "PID");
termValueSetTable.addColumn("PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
termValueSetTable.addColumn("URL").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
termValueSetTable
.addIndex("IDX_VALUESET_URL")
.addIndex("20190722.32", "IDX_VALUESET_URL")
.unique(true)
.withColumns("URL");
termValueSetTable.addColumn("RES_ID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
termValueSetTable
.addForeignKey("FK_TRMVALUESET_RES")
.addForeignKey("20190722.33", "FK_TRMVALUESET_RES")
.toColumn("RES_ID")
.references("HFJ_RESOURCE", "RES_ID");
termValueSetTable.addColumn("NAME").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_VALUESET")
.renameColumn("NAME", "VSNAME", true, true);
.renameColumn("20190722.34", "NAME", "VSNAME", true, true);
version.onTable("TRM_VALUESET")
.modifyColumn("RES_ID").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
.modifyColumn("20190722.35", "RES_ID").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
Builder.BuilderWithTableName termValueSetTableChange = version.onTable("TRM_VALUESET");
termValueSetTableChange.addColumn("EXPANSION_STATUS").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 50);
termValueSetTableChange.addColumn("20190722.36", "EXPANSION_STATUS").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 50);
termValueSetTableChange
.addIndex("IDX_VALUESET_EXP_STATUS")
.addIndex("20190722.37", "IDX_VALUESET_EXP_STATUS")
.unique(false)
.withColumns("EXPANSION_STATUS");
// TermValueSetConcept: new table plus sequence and FK back to TRM_VALUESET.
// TermValueSetConcept
version.startSectionWithMessage("Processing table: TRM_VALUESET_CONCEPT");
version.addIdGenerator("SEQ_VALUESET_CONCEPT_PID");
Builder.BuilderAddTableByColumns termValueSetConceptTable = version.addTableByColumns("TRM_VALUESET_CONCEPT", "PID");
version.addIdGenerator("20190722.38", "SEQ_VALUESET_CONCEPT_PID");
Builder.BuilderAddTableByColumns termValueSetConceptTable = version.addTableByColumns("20190722.39", "TRM_VALUESET_CONCEPT", "PID");
termValueSetConceptTable.addColumn("PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
termValueSetConceptTable.addColumn("VALUESET_PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
termValueSetConceptTable
.addForeignKey("FK_TRM_VALUESET_PID")
.addForeignKey("20190722.40", "FK_TRM_VALUESET_PID")
.toColumn("VALUESET_PID")
.references("TRM_VALUESET", "PID");
termValueSetConceptTable.addColumn("SYSTEM_URL").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
termValueSetConceptTable.addColumn("CODEVAL").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 500);
termValueSetConceptTable.addColumn("DISPLAY").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 400);
version.onTable("TRM_VALUESET_CONCEPT")
.renameColumn("CODE", "CODEVAL", true, true)
.renameColumn("SYSTEM", "SYSTEM_URL", true, true);
.renameColumn("20190722.41", "CODE", "CODEVAL", true, true)
.renameColumn("20190722.42", "SYSTEM", "SYSTEM_URL", true, true);
// Replace the old non-unique index with a unique one over (VALUESET_PID, SYSTEM_URL, CODEVAL).
version.startSectionWithMessage("Processing table: TRM_VALUESET_CONCEPT, swapping index for unique constraint");
termValueSetConceptTable.dropIndex("IDX_VALUESET_CONCEPT_CS_CD");
termValueSetConceptTable.dropIndex("20190801.1", "IDX_VALUESET_CONCEPT_CS_CD");
termValueSetConceptTable
.addIndex("IDX_VS_CONCEPT_CS_CD")
.addIndex("20190801.2", "IDX_VS_CONCEPT_CS_CD")
.unique(true)
.withColumns("VALUESET_PID", "SYSTEM_URL", "CODEVAL");
// TermValueSetConceptDesignation: new table plus sequence and FK to TRM_VALUESET_CONCEPT.
// TermValueSetConceptDesignation
version.startSectionWithMessage("Processing table: TRM_VALUESET_C_DESIGNATION");
version.addIdGenerator("SEQ_VALUESET_C_DSGNTN_PID");
Builder.BuilderAddTableByColumns termValueSetConceptDesignationTable = version.addTableByColumns("TRM_VALUESET_C_DESIGNATION", "PID");
version.addIdGenerator("20190801.3", "SEQ_VALUESET_C_DSGNTN_PID");
Builder.BuilderAddTableByColumns termValueSetConceptDesignationTable = version.addTableByColumns("20190801.4", "TRM_VALUESET_C_DESIGNATION", "PID");
termValueSetConceptDesignationTable.addColumn("PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
termValueSetConceptDesignationTable.addColumn("VALUESET_CONCEPT_PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
termValueSetConceptDesignationTable
.addForeignKey("FK_TRM_VALUESET_CONCEPT_PID")
.addForeignKey("20190801.5", "FK_TRM_VALUESET_CONCEPT_PID")
.toColumn("VALUESET_CONCEPT_PID")
.references("TRM_VALUESET_CONCEPT", "PID");
termValueSetConceptDesignationTable.addColumn("LANG").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 500);
// [diff hunk header -- merge artifact, not Java; context lines before/after it are elided]
@ -314,18 +315,18 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
termValueSetConceptDesignationTable.addColumn("USE_DISPLAY").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 500);
termValueSetConceptDesignationTable.addColumn("VAL").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 500);
termValueSetConceptDesignationTable
.addIndex("IDX_VALUESET_C_DSGNTN_VAL")
.addIndex("20190801.6", "IDX_VALUESET_C_DSGNTN_VAL")
.unique(false)
.withColumns("VAL");
// TermCodeSystemVersion
version.startSectionWithMessage("Processing table: TRM_CODESYSTEM_VER");
Builder.BuilderWithTableName termCodeSystemVersionTable = version.onTable("TRM_CODESYSTEM_VER");
termCodeSystemVersionTable.addColumn("CS_DISPLAY").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
termCodeSystemVersionTable.addColumn("20190814.1", "CS_DISPLAY").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
// ResourceReindexJobEntry
version.addIdGenerator("SEQ_RES_REINDEX_JOB");
Builder.BuilderAddTableByColumns reindex = version.addTableByColumns("HFJ_RES_REINDEX_JOB", "PID");
version.addIdGenerator("20190814.2", "SEQ_RES_REINDEX_JOB");
Builder.BuilderAddTableByColumns reindex = version.addTableByColumns("20190814.3", "HFJ_RES_REINDEX_JOB", "PID");
reindex.addColumn("PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
reindex.addColumn("RES_TYPE").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
reindex.addColumn("UPDATE_THRESHOLD_HIGH").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.DATE_TIMESTAMP);
// [diff hunk header -- merge artifact, not Java; context lines before/after it are elided]
@ -336,44 +337,44 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// Search
version.onTable("HFJ_SEARCH")
.addColumn("SEARCH_DELETED").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.BOOLEAN);
.addColumn("20190814.4", "SEARCH_DELETED").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.BOOLEAN);
version.onTable("HFJ_SEARCH")
.modifyColumn("SEARCH_LAST_RETURNED").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.DATE_TIMESTAMP);
.modifyColumn("20190814.5", "SEARCH_LAST_RETURNED").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.DATE_TIMESTAMP);
version.onTable("HFJ_SEARCH")
.addColumn("SEARCH_PARAM_MAP").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.BLOB);
.addColumn("20190814.6", "SEARCH_PARAM_MAP").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.BLOB);
version.onTable("HFJ_SEARCH")
.modifyColumn("SEARCH_UUID").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 36);
.modifyColumn("20190814.7", "SEARCH_UUID").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 36);
version.onTable("HFJ_SEARCH_PARM").dropThisTable();
version.onTable("HFJ_SEARCH_PARM").dropThisTable("20190814.8");
// Search-parameter index tables: widen/normalize RES_TYPE and related columns.
// [merge artifact: the next block of statements without task IDs is the pre-Flyway form
// of the task-ID block that follows it]
version.onTable("HFJ_SPIDX_COORDS").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
version.onTable("HFJ_SPIDX_DATE").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
version.onTable("HFJ_SPIDX_STRING").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
version.onTable("HFJ_SPIDX_STRING").addColumn("HASH_IDENTITY").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("HFJ_SPIDX_STRING").addIndex("IDX_SP_STRING_HASH_IDENT").unique(false).withColumns("HASH_IDENTITY");
version.onTable("HFJ_SPIDX_COORDS").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
version.onTable("HFJ_SPIDX_QUANTITY").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("HASH_UNITS_AND_VALPREFIX");
version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("HASH_VALPREFIX");
version.onTable("HFJ_SPIDX_NUMBER").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
version.onTable("HFJ_SPIDX_TOKEN").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
version.onTable("HFJ_SPIDX_URI").modifyColumn("RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
version.onTable("HFJ_SPIDX_URI").modifyColumn("SP_URI").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 254);
version.onTable("HFJ_SPIDX_COORDS").modifyColumn("20190814.9", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
version.onTable("HFJ_SPIDX_DATE").modifyColumn("20190814.10", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
version.onTable("HFJ_SPIDX_STRING").modifyColumn("20190814.11", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
version.onTable("HFJ_SPIDX_STRING").addColumn("20190814.12", "HASH_IDENTITY").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("HFJ_SPIDX_STRING").addIndex("20190814.13", "IDX_SP_STRING_HASH_IDENT").unique(false).withColumns("HASH_IDENTITY");
version.onTable("HFJ_SPIDX_COORDS").modifyColumn("20190814.14", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
version.onTable("HFJ_SPIDX_QUANTITY").modifyColumn("20190814.15", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("20190814.16", "HASH_UNITS_AND_VALPREFIX");
version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("20190814.17", "HASH_VALPREFIX");
version.onTable("HFJ_SPIDX_NUMBER").modifyColumn("20190814.18", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
version.onTable("HFJ_SPIDX_TOKEN").modifyColumn("20190814.19", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
version.onTable("HFJ_SPIDX_URI").modifyColumn("20190814.20", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
version.onTable("HFJ_SPIDX_URI").modifyColumn("20190814.21", "SP_URI").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 254);
version.onTable("TRM_CODESYSTEM").modifyColumn("CODE_SYSTEM_URI").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_CODESYSTEM").modifyColumn("CS_NAME").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_CODESYSTEM_VER").modifyColumn("CS_VERSION_ID").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_CODESYSTEM").modifyColumn("20190814.22", "CODE_SYSTEM_URI").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_CODESYSTEM").modifyColumn("20190814.23", "CS_NAME").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("TRM_CODESYSTEM_VER").modifyColumn("20190814.24", "CS_VERSION_ID").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
}
// Registers schema-migration tasks for the HAPI FHIR JPA 3.6.0 release (task IDs 20180929.* - 20181104.*).
//
// NOTE(review): same unresolved diff/merge rendering as init400 above -- each statement
// appears in both its pre-Flyway form (no task-ID argument) and its task-ID form, and raw
// diff hunk headers are embedded, so some context lines are missing. Keep the task-ID
// variants and drop the duplicates/markers when resolving. The comments below change no code.
private void init360() {
private void init360() { // 20180918 - 20181112
Builder version = forVersion(VersionEnum.V3_6_0);
// Resource Link
Builder.BuilderWithTableName resourceLink = version.onTable("HFJ_RES_LINK");
version.startSectionWithMessage("Starting work on table: " + resourceLink.getTableName());
resourceLink
.modifyColumn("SRC_PATH")
.modifyColumn("20180929.1", "SRC_PATH")
.nonNullable()
.withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
// [diff hunk header -- merge artifact, not Java; context lines before/after it are elided]
@ -381,11 +382,11 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// Search: add optimistic-locking version column.
Builder.BuilderWithTableName search = version.onTable("HFJ_SEARCH");
version.startSectionWithMessage("Starting work on table: " + search.getTableName());
search
.addColumn("OPTLOCK_VERSION")
.addColumn("20181001.1", "OPTLOCK_VERSION")
.nullable()
.type(BaseTableColumnTypeTask.ColumnTypeEnum.INT);
// HFJ_RES_REINDEX_JOB is created from per-dialect raw DDL rather than the column builder.
version.addTableRawSql("HFJ_RES_REINDEX_JOB")
version.addTableRawSql("20181104.1", "HFJ_RES_REINDEX_JOB")
.addSql(DriverTypeEnum.MSSQL_2012, "create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED bit not null, RES_TYPE varchar(255), SUSPENDED_UNTIL datetime2, UPDATE_THRESHOLD_HIGH datetime2 not null, UPDATE_THRESHOLD_LOW datetime2, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED boolean not null, RES_TYPE varchar(255), SUSPENDED_UNTIL timestamp, UPDATE_THRESHOLD_HIGH timestamp not null, UPDATE_THRESHOLD_LOW timestamp, primary key (PID))")
.addSql(DriverTypeEnum.MARIADB_10_1, "create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED bit not null, RES_TYPE varchar(255), SUSPENDED_UNTIL datetime(6), UPDATE_THRESHOLD_HIGH datetime(6) not null, UPDATE_THRESHOLD_LOW datetime(6), primary key (PID))")
// [diff hunk header -- merge artifact, not Java; context lines before/after it are elided]
@ -393,17 +394,17 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
.addSql(DriverTypeEnum.MYSQL_5_7, " create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED bit not null, RES_TYPE varchar(255), SUSPENDED_UNTIL datetime(6), UPDATE_THRESHOLD_HIGH datetime(6) not null, UPDATE_THRESHOLD_LOW datetime(6), primary key (PID))")
.addSql(DriverTypeEnum.ORACLE_12C, "create table HFJ_RES_REINDEX_JOB (PID number(19,0) not null, JOB_DELETED number(1,0) not null, RES_TYPE varchar2(255 char), SUSPENDED_UNTIL timestamp, UPDATE_THRESHOLD_HIGH timestamp not null, UPDATE_THRESHOLD_LOW timestamp, primary key (PID))");
// Terminology tables: link designations/properties to their code-system version.
version.onTable("TRM_CONCEPT_DESIG").addColumn("CS_VER_PID").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("TRM_CONCEPT_DESIG").addForeignKey("FK_CONCEPTDESIG_CSV").toColumn("CS_VER_PID").references("TRM_CODESYSTEM_VER", "PID");
version.onTable("TRM_CONCEPT_DESIG").addColumn("20181104.2", "CS_VER_PID").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("TRM_CONCEPT_DESIG").addForeignKey("20181104.3", "FK_CONCEPTDESIG_CSV").toColumn("CS_VER_PID").references("TRM_CODESYSTEM_VER", "PID");
version.onTable("TRM_CONCEPT_PROPERTY").addColumn("CS_VER_PID").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("TRM_CONCEPT_PROPERTY").addForeignKey("FK_CONCEPTPROP_CSV").toColumn("CS_VER_PID").references("TRM_CODESYSTEM_VER", "PID");
version.onTable("TRM_CONCEPT_PROPERTY").addColumn("20181104.4", "CS_VER_PID").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
version.onTable("TRM_CONCEPT_PROPERTY").addForeignKey("20181104.5", "FK_CONCEPTPROP_CSV").toColumn("CS_VER_PID").references("TRM_CODESYSTEM_VER", "PID");
version.onTable("TRM_CONCEPT").addColumn("PARENT_PIDS").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.CLOB);
version.onTable("TRM_CONCEPT").addColumn("20181104.6", "PARENT_PIDS").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.CLOB);
}
private void init350() {
private void init350() { // 20180601 - 20180917
Builder version = forVersion(VersionEnum.V3_5_0);
// Forced ID changes
@ -411,12 +412,12 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
version.startSectionWithMessage("Starting work on table: " + forcedId.getTableName());
forcedId
.dropIndex("IDX_FORCEDID_TYPE_FORCEDID");
.dropIndex("20180827.1", "IDX_FORCEDID_TYPE_FORCEDID");
forcedId
.dropIndex("IDX_FORCEDID_TYPE_RESID");
.dropIndex("20180827.2", "IDX_FORCEDID_TYPE_RESID");
forcedId
.addIndex("IDX_FORCEDID_TYPE_FID")
.addIndex("20180827.3", "IDX_FORCEDID_TYPE_FID")
.unique(true)
.withColumns("RESOURCE_TYPE", "FORCED_ID");
@ -424,18 +425,18 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
Builder.BuilderWithTableName spidxCoords = version.onTable("HFJ_SPIDX_COORDS");
version.startSectionWithMessage("Starting work on table: " + spidxCoords.getTableName());
spidxCoords
.addColumn("HASH_IDENTITY")
.addColumn("20180903.1", "HASH_IDENTITY")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) {
spidxCoords
.dropIndex("IDX_SP_COORDS");
.dropIndex("20180903.2", "IDX_SP_COORDS");
spidxCoords
.addIndex("IDX_SP_COORDS_HASH")
.addIndex("20180903.4", "IDX_SP_COORDS_HASH")
.unique(false)
.withColumns("HASH_IDENTITY", "SP_LATITUDE", "SP_LONGITUDE");
spidxCoords
.addTask(new CalculateHashesTask()
.addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.5")
.setColumnName("HASH_IDENTITY")
.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
);
@ -445,20 +446,20 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
Builder.BuilderWithTableName spidxDate = version.onTable("HFJ_SPIDX_DATE");
version.startSectionWithMessage("Starting work on table: " + spidxDate.getTableName());
spidxDate
.addColumn("HASH_IDENTITY")
.addColumn("20180903.6", "HASH_IDENTITY")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) {
spidxDate
.dropIndex("IDX_SP_TOKEN");
.dropIndex("20180903.7", "IDX_SP_TOKEN");
spidxDate
.addIndex("IDX_SP_DATE_HASH")
.addIndex("20180903.8", "IDX_SP_DATE_HASH")
.unique(false)
.withColumns("HASH_IDENTITY", "SP_VALUE_LOW", "SP_VALUE_HIGH");
spidxDate
.dropIndex("IDX_SP_DATE");
.dropIndex("20180903.9", "IDX_SP_DATE");
spidxDate
.addTask(new CalculateHashesTask()
.addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.10")
.setColumnName("HASH_IDENTITY")
.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
);
@ -468,18 +469,18 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
Builder.BuilderWithTableName spidxNumber = version.onTable("HFJ_SPIDX_NUMBER");
version.startSectionWithMessage("Starting work on table: " + spidxNumber.getTableName());
spidxNumber
.addColumn("HASH_IDENTITY")
.addColumn("20180903.11", "HASH_IDENTITY")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) {
spidxNumber
.dropIndex("IDX_SP_NUMBER");
.dropIndex("20180903.12", "IDX_SP_NUMBER");
spidxNumber
.addIndex("IDX_SP_NUMBER_HASH_VAL")
.addIndex("20180903.13", "IDX_SP_NUMBER_HASH_VAL")
.unique(false)
.withColumns("HASH_IDENTITY", "SP_VALUE");
spidxNumber
.addTask(new CalculateHashesTask()
.addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.14")
.setColumnName("HASH_IDENTITY")
.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
);
@ -489,34 +490,34 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
Builder.BuilderWithTableName spidxQuantity = version.onTable("HFJ_SPIDX_QUANTITY");
version.startSectionWithMessage("Starting work on table: " + spidxQuantity.getTableName());
spidxQuantity
.addColumn("HASH_IDENTITY")
.addColumn("20180903.15", "HASH_IDENTITY")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
spidxQuantity
.addColumn("HASH_IDENTITY_SYS_UNITS")
.addColumn("20180903.16", "HASH_IDENTITY_SYS_UNITS")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
spidxQuantity
.addColumn("HASH_IDENTITY_AND_UNITS")
.addColumn("20180903.17", "HASH_IDENTITY_AND_UNITS")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) {
spidxQuantity
.dropIndex("IDX_SP_QUANTITY");
.dropIndex("20180903.18", "IDX_SP_QUANTITY");
spidxQuantity
.addIndex("IDX_SP_QUANTITY_HASH")
.addIndex("20180903.19", "IDX_SP_QUANTITY_HASH")
.unique(false)
.withColumns("HASH_IDENTITY", "SP_VALUE");
spidxQuantity
.addIndex("IDX_SP_QUANTITY_HASH_UN")
.addIndex("20180903.20", "IDX_SP_QUANTITY_HASH_UN")
.unique(false)
.withColumns("HASH_IDENTITY_AND_UNITS", "SP_VALUE");
spidxQuantity
.addIndex("IDX_SP_QUANTITY_HASH_SYSUN")
.addIndex("20180903.21", "IDX_SP_QUANTITY_HASH_SYSUN")
.unique(false)
.withColumns("HASH_IDENTITY_SYS_UNITS", "SP_VALUE");
spidxQuantity
.addTask(new CalculateHashesTask()
.addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.22")
.setColumnName("HASH_IDENTITY")
.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
.addCalculator("HASH_IDENTITY_AND_UNITS", t -> ResourceIndexedSearchParamQuantity.calculateHashUnits(t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_UNITS")))
@ -528,26 +529,26 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
Builder.BuilderWithTableName spidxString = version.onTable("HFJ_SPIDX_STRING");
version.startSectionWithMessage("Starting work on table: " + spidxString.getTableName());
spidxString
.addColumn("HASH_NORM_PREFIX")
.addColumn("20180903.23", "HASH_NORM_PREFIX")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) {
spidxString
.dropIndex("IDX_SP_STRING");
.dropIndex("20180903.24", "IDX_SP_STRING");
spidxString
.addIndex("IDX_SP_STRING_HASH_NRM")
.addIndex("20180903.25", "IDX_SP_STRING_HASH_NRM")
.unique(false)
.withColumns("HASH_NORM_PREFIX", "SP_VALUE_NORMALIZED");
spidxString
.addColumn("HASH_EXACT")
.addColumn("20180903.26", "HASH_EXACT")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
spidxString
.addIndex("IDX_SP_STRING_HASH_EXCT")
.addIndex("20180903.27","IDX_SP_STRING_HASH_EXCT")
.unique(false)
.withColumns("HASH_EXACT");
.withColumns( "HASH_EXACT");
spidxString
.addTask(new CalculateHashesTask()
.addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.28")
.setColumnName("HASH_NORM_PREFIX")
.addCalculator("HASH_NORM_PREFIX", t -> ResourceIndexedSearchParamString.calculateHashNormalized(new ModelConfig(), t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_VALUE_NORMALIZED")))
.addCalculator("HASH_EXACT", t -> ResourceIndexedSearchParamString.calculateHashExact(t.getResourceType(), t.getParamName(), t.getString("SP_VALUE_EXACT")))
@ -558,44 +559,44 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
Builder.BuilderWithTableName spidxToken = version.onTable("HFJ_SPIDX_TOKEN");
version.startSectionWithMessage("Starting work on table: " + spidxToken.getTableName());
spidxToken
.addColumn("HASH_IDENTITY")
.addColumn("20180903.29", "HASH_IDENTITY")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
spidxToken
.addColumn("HASH_SYS")
.addColumn("20180903.30", "HASH_SYS")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
spidxToken
.addColumn("HASH_SYS_AND_VALUE")
.addColumn("20180903.31", "HASH_SYS_AND_VALUE")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
spidxToken
.addColumn("HASH_VALUE")
.addColumn("20180903.32", "HASH_VALUE")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) {
spidxToken
.dropIndex("IDX_SP_TOKEN");
.dropIndex("20180903.33", "IDX_SP_TOKEN");
spidxToken
.dropIndex("IDX_SP_TOKEN_UNQUAL");
.dropIndex("20180903.34", "IDX_SP_TOKEN_UNQUAL");
spidxToken
.addIndex("IDX_SP_TOKEN_HASH")
.addIndex("20180903.35", "IDX_SP_TOKEN_HASH")
.unique(false)
.withColumns("HASH_IDENTITY");
spidxToken
.addIndex("IDX_SP_TOKEN_HASH_S")
.addIndex("20180903.36","IDX_SP_TOKEN_HASH_S")
.unique(false)
.withColumns("HASH_SYS");
.withColumns( "HASH_SYS");
spidxToken
.addIndex("IDX_SP_TOKEN_HASH_SV")
.addIndex("20180903.37", "IDX_SP_TOKEN_HASH_SV")
.unique(false)
.withColumns("HASH_SYS_AND_VALUE");
spidxToken
.addIndex("IDX_SP_TOKEN_HASH_V")
.addIndex("20180903.38","IDX_SP_TOKEN_HASH_V")
.unique(false)
.withColumns("HASH_VALUE");
.withColumns( "HASH_VALUE");
spidxToken
.addTask(new CalculateHashesTask()
.addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.39")
.setColumnName("HASH_IDENTITY")
.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
.addCalculator("HASH_SYS", t -> ResourceIndexedSearchParamToken.calculateHashSystem(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM")))
@ -608,24 +609,24 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
Builder.BuilderWithTableName spidxUri = version.onTable("HFJ_SPIDX_URI");
version.startSectionWithMessage("Starting work on table: " + spidxUri.getTableName());
spidxUri
.addColumn("HASH_IDENTITY")
.addColumn("20180903.40", "HASH_IDENTITY")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) {
spidxUri
.addIndex("IDX_SP_URI_HASH_IDENTITY")
.addIndex("20180903.41", "IDX_SP_URI_HASH_IDENTITY")
.unique(false)
.withColumns("HASH_IDENTITY", "SP_URI");
spidxUri
.addColumn("HASH_URI")
.addColumn("20180903.42", "HASH_URI")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
spidxUri
.addIndex("IDX_SP_URI_HASH_URI")
.addIndex("20180903.43","IDX_SP_URI_HASH_URI")
.unique(false)
.withColumns("HASH_URI");
.withColumns( "HASH_URI");
spidxUri
.addTask(new CalculateHashesTask()
.addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.44")
.setColumnName("HASH_IDENTITY")
.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")))
.addCalculator("HASH_URI", t -> ResourceIndexedSearchParamUri.calculateHashUri(t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_URI")))
@ -635,17 +636,17 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// Search Parameter Presence
Builder.BuilderWithTableName spp = version.onTable("HFJ_RES_PARAM_PRESENT");
version.startSectionWithMessage("Starting work on table: " + spp.getTableName());
spp.dropIndex("IDX_RESPARMPRESENT_SPID_RESID");
spp.dropIndex("20180903.45", "IDX_RESPARMPRESENT_SPID_RESID");
spp
.addColumn("HASH_PRESENCE")
.addColumn("20180903.46", "HASH_PRESENCE")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
spp
.addIndex("IDX_RESPARMPRESENT_HASHPRES")
.addIndex("20180903.47", "IDX_RESPARMPRESENT_HASHPRES")
.unique(false)
.withColumns("HASH_PRESENCE");
ArbitrarySqlTask consolidateSearchParamPresenceIndexesTask = new ArbitrarySqlTask("HFJ_SEARCH_PARM", "Consolidate search parameter presence indexes");
ArbitrarySqlTask consolidateSearchParamPresenceIndexesTask = new ArbitrarySqlTask(VersionEnum.V3_5_0, "20180903.48", "HFJ_SEARCH_PARM", "Consolidate search parameter presence indexes");
consolidateSearchParamPresenceIndexesTask.setExecuteOnlyIfTableExists("HFJ_SEARCH_PARM");
consolidateSearchParamPresenceIndexesTask.setBatchSize(1);
@ -667,28 +668,28 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
version.addTask(consolidateSearchParamPresenceIndexesTask);
// SP_ID is no longer needed
spp.dropColumn("SP_ID");
spp.dropColumn("20180903.49", "SP_ID");
// Concept
Builder.BuilderWithTableName trmConcept = version.onTable("TRM_CONCEPT");
version.startSectionWithMessage("Starting work on table: " + trmConcept.getTableName());
trmConcept
.addColumn("CONCEPT_UPDATED")
.addColumn("20180903.50", "CONCEPT_UPDATED")
.nullable()
.type(BaseTableColumnTypeTask.ColumnTypeEnum.DATE_TIMESTAMP);
trmConcept
.addIndex("IDX_CONCEPT_UPDATED")
.addIndex("20180903.51", "IDX_CONCEPT_UPDATED")
.unique(false)
.withColumns("CONCEPT_UPDATED");
.withColumns( "CONCEPT_UPDATED");
trmConcept
.modifyColumn("CODE")
.modifyColumn("20180903.52", "CODE")
.nonNullable()
.withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 500);
// Concept Designation
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_DESIG");
version
.addTableRawSql("TRM_CONCEPT_DESIG")
.addTableRawSql("20180907.1", "TRM_CONCEPT_DESIG")
.addSql(DriverTypeEnum.H2_EMBEDDED, "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
.addSql(DriverTypeEnum.H2_EMBEDDED, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
.addSql(DriverTypeEnum.H2_EMBEDDED, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
@ -714,7 +715,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// Concept Property
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_PROPERTY");
version
.addTableRawSql("TRM_CONCEPT_PROPERTY")
.addTableRawSql("20180907.2", "TRM_CONCEPT_PROPERTY")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
@ -737,7 +738,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// Concept Map - Map
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP");
version
.addTableRawSql("TRM_CONCEPT_MAP")
.addTableRawSql("20180907.3", "TRM_CONCEPT_MAP")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE")
.addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))")
@ -759,7 +760,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// Concept Map - Group
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP_GROUP");
version
.addTableRawSql("TRM_CONCEPT_MAP_GROUP")
.addTableRawSql("20180907.4", "TRM_CONCEPT_MAP_GROUP")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create unique index IDX_CONCEPT_MAP_URL on TRM_CONCEPT_MAP (URL)")
@ -777,7 +778,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// Concept Map - Group Element
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP_GRP_ELEMENT");
version
.addTableRawSql("TRM_CONCEPT_MAP_GRP_ELEMENT")
.addTableRawSql("20180907.5", "TRM_CONCEPT_MAP_GRP_ELEMENT")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP")
.addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))")
@ -800,7 +801,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// Concept Map - Group Element Target
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP_GRP_ELM_TGT");
version
.addTableRawSql("TRM_CONCEPT_MAP_GRP_ELM_TGT")
.addTableRawSql("20180907.6", "TRM_CONCEPT_MAP_GRP_ELM_TGT")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)")
@ -820,7 +821,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
.addSql(DriverTypeEnum.MSSQL_2012, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)")
.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT");
version.onTable("HFJ_IDX_CMP_STRING_UNIQ").modifyColumn("IDX_STRING").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
version.onTable("HFJ_IDX_CMP_STRING_UNIQ").modifyColumn("20180907.7", "IDX_STRING").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
}
@ -837,26 +838,26 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
return longValue == 1L;
}
private void init340() {
private void init340() { // 20180401 - 20180528
Builder version = forVersion(VersionEnum.V3_4_0);
// CodeSystem Version
Builder.BuilderWithTableName resourceLink = version.onTable("TRM_CODESYSTEM_VER");
version.startSectionWithMessage("Starting work on table: " + resourceLink.getTableName());
resourceLink
.dropIndex("IDX_CSV_RESOURCEPID_AND_VER");
.dropIndex("20180401.1", "IDX_CSV_RESOURCEPID_AND_VER");
resourceLink
.dropColumn("RES_VERSION_ID");
.dropColumn("20180401.2", "RES_VERSION_ID");
resourceLink
.addColumn("CS_VERSION_ID")
.addColumn("20180401.3", "CS_VERSION_ID")
.nullable()
.type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 255);
resourceLink
.addColumn("CODESYSTEM_PID")
.addColumn("20180401.4", "CODESYSTEM_PID")
.nullable()
.type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
resourceLink
.addForeignKey("FK_CODESYSVER_CS_ID")
.addForeignKey("20180401.5", "FK_CODESYSVER_CS_ID")
.toColumn("CODESYSTEM_PID")
.references("TRM_CODESYSTEM", "PID");
@ -864,26 +865,28 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
Builder.BuilderWithTableName concept = version.onTable("TRM_CONCEPT");
version.startSectionWithMessage("Starting work on table: " + concept.getTableName());
concept
.addColumn("CODE_SEQUENCE")
.addColumn("20180401.6", "CODE_SEQUENCE")
.nullable()
.type(BaseTableColumnTypeTask.ColumnTypeEnum.INT);
}
private void init330() {
private void init330() { // 20180114 - 20180329
Builder version = forVersion(VersionEnum.V3_3_0);
version.initializeSchema("20180115.0", new SchemaInitializationProvider("/ca/uhn/hapi/fhir/jpa/docs/database", "HFJ_RESOURCE"));
Builder.BuilderWithTableName hfjResource = version.onTable("HFJ_RESOURCE");
version.startSectionWithMessage("Starting work on table: " + hfjResource.getTableName());
hfjResource.dropColumn("RES_TEXT");
hfjResource.dropColumn("RES_ENCODING");
hfjResource.dropColumn("20180115.1", "RES_TEXT");
hfjResource.dropColumn("20180115.2", "RES_ENCODING");
Builder.BuilderWithTableName hfjResVer = version.onTable("HFJ_RES_VER");
version.startSectionWithMessage("Starting work on table: " + hfjResVer.getTableName());
hfjResVer.modifyColumn("RES_ENCODING")
hfjResVer.modifyColumn("20180115.3", "RES_ENCODING")
.nullable();
hfjResVer.modifyColumn("RES_TEXT")
hfjResVer.modifyColumn("20180115.4", "RES_TEXT")
.nullable();
}

View File

@ -0,0 +1,95 @@
package ca.uhn.fhir.jpa.migrate.tasks;
/*-
* #%L
* HAPI FHIR JPA Server - Migration
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.tasks.api.ISchemaInitializationProvider;
import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
public class SchemaInitializationProvider implements ISchemaInitializationProvider {
	// Classpath directory containing one schema-initialization script per driver type
	private final String mySchemaFileClassPath;
	// Table whose existence indicates the schema has already been initialized
	private final String mySchemaExistsIndicatorTable;

	/**
	 * @param theSchemaFileClassPath        pathname to script used to initialize schema
	 * @param theSchemaExistsIndicatorTable a table name we can use to determine if this schema has already been initialized
	 */
	public SchemaInitializationProvider(String theSchemaFileClassPath, String theSchemaExistsIndicatorTable) {
		mySchemaFileClassPath = theSchemaFileClassPath;
		mySchemaExistsIndicatorTable = theSchemaExistsIndicatorTable;
	}

	/**
	 * Loads the driver-specific initialization script from the classpath and splits it into
	 * individual SQL statements.
	 *
	 * @param theDriverType selects which schema file to load (via {@code getSchemaFilename()})
	 * @return the non-blank SQL statements from the script, in file order
	 * @throws ConfigurationException if the script is missing from the classpath or cannot be read
	 */
	@Override
	public List<String> getSqlStatements(DriverTypeEnum theDriverType) {
		List<String> retval = new ArrayList<>();

		String initScript = mySchemaFileClassPath + "/" + theDriverType.getSchemaFilename();
		// FIX: use try-with-resources -- the original never closed the stream on any path
		try (InputStream sqlFileInputStream = SchemaInitializationProvider.class.getResourceAsStream(initScript)) {
			if (sqlFileInputStream == null) {
				throw new ConfigurationException("Schema initialization script " + initScript + " not found on classpath");
			}
			// Assumes no escaped semicolons...
			String[] statements = IOUtils.toString(sqlFileInputStream, Charsets.UTF_8).split("\\;");
			for (String statement : statements) {
				if (!statement.trim().isEmpty()) {
					retval.add(statement);
				}
			}
		} catch (IOException e) {
			throw new ConfigurationException("Error reading schema initialization script " + initScript, e);
		}
		return retval;
	}

	@Override
	public boolean equals(Object theO) {
		if (this == theO) return true;
		if (theO == null || getClass() != theO.getClass()) return false;

		SchemaInitializationProvider that = (SchemaInitializationProvider) theO;
		// FIX: was a reference comparison (==) between two Strings; use equals().
		// (After the getClass() check above the classes are identical, so this is always true,
		// but value comparison is kept for clarity and safety.)
		return this.getClass().getSimpleName().equals(that.getClass().getSimpleName());
	}

	@Override
	public int hashCode() {
		// Consistent with equals(): identity is determined by the concrete class only
		return new HashCodeBuilder(17, 37)
			.append(this.getClass().getSimpleName())
			.toHashCode();
	}

	@Override
	public String getSchemaExistsIndicatorTable() {
		return mySchemaExistsIndicatorTable;
	}
}

View File

@ -20,13 +20,12 @@ package ca.uhn.fhir.jpa.migrate.tasks.api;
* #L%
*/
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.*;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import com.google.common.collect.Multimap;
import com.google.common.collect.MultimapBuilder;
import org.apache.commons.lang3.EnumUtils;
import org.apache.commons.lang3.Validate;
import org.intellij.lang.annotations.Language;
import org.flywaydb.core.api.MigrationVersion;
import javax.annotation.Nonnull;
import java.util.ArrayList;
@ -35,6 +34,7 @@ import java.util.List;
public class BaseMigrationTasks<T extends Enum> {
private Multimap<T, BaseTask<?>> myTasks = MultimapBuilder.hashKeys().arrayListValues().build();
MigrationVersion lastVersion;
@SuppressWarnings("unchecked")
public List<BaseTask<?>> getTasks(@Nonnull T theFrom, @Nonnull T theTo) {
@ -60,360 +60,42 @@ public class BaseMigrationTasks<T extends Enum> {
return retVal;
}
public Builder forVersion(T theVersion) {
public Builder forVersion(T theRelease) {
IAcceptsTasks sink = theTask -> {
theTask.validate();
myTasks.put(theVersion, theTask);
myTasks.put(theRelease, theTask);
};
return new Builder(sink);
return new Builder(theRelease.name(), sink);
}
/**
 * Collects the migration tasks registered for every release, in release order,
 * validating version ordering as it goes.
 *
 * @param theVersionEnumValues all releases, in ascending order
 * @return every registered task across all releases
 */
public List<BaseTask<?>> getAllTasks(T[] theVersionEnumValues) {
	List<BaseTask<?>> allTasks = new ArrayList<>();
	for (T release : theVersionEnumValues) {
		Collection<BaseTask<?>> releaseTasks = myTasks.get(release);
		if (releaseTasks != null) {
			validate(releaseTasks);
			allTasks.addAll(releaseTasks);
		}
	}
	return allTasks;
}
/**
 * Checks that each task's Flyway version is strictly greater than the previously
 * seen one (tracked in {@code lastVersion} across calls), since Flyway requires
 * migrations to be applied in ascending version order.
 *
 * @throws IllegalStateException if a task's version is out of order
 */
void validate(Collection<BaseTask<?>> theTasks) {
	// FIX: was a raw 'BaseTask task' -- use the wildcard-parameterized type
	for (BaseTask<?> task : theTasks) {
		task.validateVersion();
		String version = task.getFlywayVersion();
		MigrationVersion migrationVersion = MigrationVersion.fromVersion(version);
		if (lastVersion != null) {
			if (migrationVersion.compareTo(lastVersion) <= 0) {
				throw new IllegalStateException("Migration version " + migrationVersion + " found after migration version " + lastVersion + ". Migrations need to be in order by version number.");
			}
		}
		lastVersion = migrationVersion;
	}
}
/**
 * Sink for migration tasks: implementations receive each task produced by a builder
 * (and may validate or record it).
 */
public interface IAcceptsTasks {
void addTask(BaseTask<?> theTask);
}
public static class Builder {
private final IAcceptsTasks mySink;
public Builder(IAcceptsTasks theSink) {
mySink = theSink;
}
public BuilderWithTableName onTable(String theTableName) {
return new BuilderWithTableName(mySink, theTableName);
}
public void addTask(BaseTask<?> theTask) {
mySink.addTask(theTask);
}
public BuilderAddTableRawSql addTableRawSql(String theTableName) {
return new BuilderAddTableRawSql(theTableName);
}
public Builder executeRawSql(@Language("SQL") String theSql) {
mySink.addTask(new ExecuteRawSqlTask().addSql(theSql));
return this;
}
public Builder executeRawSql(DriverTypeEnum theDriver, @Language("SQL") String theSql) {
mySink.addTask(new ExecuteRawSqlTask().addSql(theDriver, theSql));
return this;
}
public Builder startSectionWithMessage(String theMessage) {
Validate.notBlank(theMessage);
addTask(new LogStartSectionWithMessageTask(theMessage));
return this;
}
public BuilderAddTableByColumns addTableByColumns(String theTableName, String thePkColumnName) {
return new BuilderAddTableByColumns(mySink, theTableName, thePkColumnName);
}
public void addIdGenerator(String theGeneratorName) {
AddIdGeneratorTask task = new AddIdGeneratorTask(theGeneratorName);
addTask(task);
}
public void dropIdGenerator(String theIdGeneratorName) {
DropIdGeneratorTask task = new DropIdGeneratorTask(theIdGeneratorName);
addTask(task);
}
public class BuilderAddTableRawSql {
private final AddTableRawSqlTask myTask;
protected BuilderAddTableRawSql(String theTableName) {
myTask = new AddTableRawSqlTask();
myTask.setTableName(theTableName);
addTask(myTask);
}
public BuilderAddTableRawSql addSql(DriverTypeEnum theDriverTypeEnum, @Language("SQL") String theSql) {
myTask.addSql(theDriverTypeEnum, theSql);
return this;
}
public void addSql(@Language("SQL") String theSql) {
myTask.addSql(theSql);
}
}
public class BuilderAddTableByColumns extends BuilderWithTableName implements IAcceptsTasks {
private final AddTableByColumnTask myTask;
public BuilderAddTableByColumns(IAcceptsTasks theSink, String theTableName, String thePkColumnName) {
super(theSink, theTableName);
myTask = new AddTableByColumnTask();
myTask.setTableName(theTableName);
myTask.setPkColumn(thePkColumnName);
theSink.addTask(myTask);
}
@Override
public BuilderWithTableName.BuilderAddColumnWithName addColumn(String theColumnName) {
return new BuilderWithTableName.BuilderAddColumnWithName(theColumnName, this);
}
@Override
public void addTask(BaseTask<?> theTask) {
if (theTask instanceof AddColumnTask) {
myTask.addAddColumnTask((AddColumnTask) theTask);
} else {
super.addTask(theTask);
}
}
}
public static class BuilderWithTableName implements IAcceptsTasks {
private final String myTableName;
private final IAcceptsTasks mySink;
public BuilderWithTableName(IAcceptsTasks theSink, String theTableName) {
mySink = theSink;
myTableName = theTableName;
}
public String getTableName() {
return myTableName;
}
public void dropIndex(String theIndexName) {
DropIndexTask task = new DropIndexTask();
task.setIndexName(theIndexName);
task.setTableName(myTableName);
addTask(task);
}
public void dropThisTable() {
DropTableTask task = new DropTableTask();
task.setTableName(myTableName);
addTask(task);
}
public BuilderAddIndexWithName addIndex(String theIndexName) {
return new BuilderAddIndexWithName(theIndexName);
}
public BuilderAddColumnWithName addColumn(String theColumnName) {
return new BuilderAddColumnWithName(theColumnName, this);
}
public void dropColumn(String theColumnName) {
Validate.notBlank(theColumnName);
DropColumnTask task = new DropColumnTask();
task.setTableName(myTableName);
task.setColumnName(theColumnName);
addTask(task);
}
@Override
public void addTask(BaseTask<?> theTask) {
((BaseTableTask<?>) theTask).setTableName(myTableName);
mySink.addTask(theTask);
}
public BuilderModifyColumnWithName modifyColumn(String theColumnName) {
return new BuilderModifyColumnWithName(theColumnName);
}
public BuilderAddForeignKey addForeignKey(String theForeignKeyName) {
return new BuilderAddForeignKey(theForeignKeyName);
}
public BuilderWithTableName renameColumn(String theOldName, String theNewName) {
return renameColumn(theOldName, theNewName, false, false);
}
/**
* @param theOldName The old column name
* @param theNewName The new column name
* @param theAllowNeitherColumnToExist Setting this to true means that it's not an error if neither column exists
* @param theDeleteTargetColumnFirstIfBothEixst Setting this to true causes the migrator to be ok with the target column existing. It will make sure that there is no data in the column with the new name, then delete it if so in order to make room for the renamed column. If there is data it will still bomb out.
*/
public BuilderWithTableName renameColumn(String theOldName, String theNewName, boolean theAllowNeitherColumnToExist, boolean theDeleteTargetColumnFirstIfBothEixst) {
RenameColumnTask task = new RenameColumnTask();
task.setTableName(myTableName);
task.setOldName(theOldName);
task.setNewName(theNewName);
task.setAllowNeitherColumnToExist(theAllowNeitherColumnToExist);
task.setDeleteTargetColumnFirstIfBothExist(theDeleteTargetColumnFirstIfBothEixst);
addTask(task);
return this;
}
/**
*
* @param theFkName the name of the foreign key
* @param theParentTableName the name of the table that exports the foreign key
*/
public void dropForeignKey(String theFkName, String theParentTableName) {
DropForeignKeyTask task = new DropForeignKeyTask();
task.setConstraintName(theFkName);
task.setTableName(getTableName());
task.setParentTableName(theParentTableName);
addTask(task);
}
public class BuilderAddIndexWithName {
private final String myIndexName;
public BuilderAddIndexWithName(String theIndexName) {
myIndexName = theIndexName;
}
public BuilderAddIndexUnique unique(boolean theUnique) {
return new BuilderAddIndexUnique(theUnique);
}
public class BuilderAddIndexUnique {
private final boolean myUnique;
public BuilderAddIndexUnique(boolean theUnique) {
myUnique = theUnique;
}
public void withColumns(String... theColumnNames) {
AddIndexTask task = new AddIndexTask();
task.setTableName(myTableName);
task.setIndexName(myIndexName);
task.setUnique(myUnique);
task.setColumns(theColumnNames);
addTask(task);
}
}
}
public class BuilderModifyColumnWithName {
private final String myColumnName;
public BuilderModifyColumnWithName(String theColumnName) {
myColumnName = theColumnName;
}
public String getColumnName() {
return myColumnName;
}
public BuilderModifyColumnWithNameAndNullable nullable() {
return new BuilderModifyColumnWithNameAndNullable(true);
}
public BuilderModifyColumnWithNameAndNullable nonNullable() {
return new BuilderModifyColumnWithNameAndNullable(false);
}
public class BuilderModifyColumnWithNameAndNullable {
private final boolean myNullable;
public BuilderModifyColumnWithNameAndNullable(boolean theNullable) {
myNullable = theNullable;
}
public void withType(BaseTableColumnTypeTask.ColumnTypeEnum theColumnType) {
withType(theColumnType, null);
}
public void withType(BaseTableColumnTypeTask.ColumnTypeEnum theColumnType, Integer theLength) {
if (theColumnType == BaseTableColumnTypeTask.ColumnTypeEnum.STRING) {
if (theLength == null || theLength == 0) {
throw new IllegalArgumentException("Can not specify length 0 for column of type " + theColumnType);
}
} else {
if (theLength != null) {
throw new IllegalArgumentException("Can not specify length for column of type " + theColumnType);
}
}
ModifyColumnTask task = new ModifyColumnTask();
task.setColumnName(myColumnName);
task.setTableName(myTableName);
if (theLength != null) {
task.setColumnLength(theLength);
}
task.setNullable(myNullable);
task.setColumnType(theColumnType);
addTask(task);
}
}
}
public class BuilderAddForeignKey {
private final String myForeignKeyName;
public BuilderAddForeignKey(String theForeignKeyName) {
myForeignKeyName = theForeignKeyName;
}
public BuilderAddForeignKeyToColumn toColumn(String theColumnName) {
return new BuilderAddForeignKeyToColumn(theColumnName);
}
public class BuilderAddForeignKeyToColumn extends BuilderModifyColumnWithName {
public BuilderAddForeignKeyToColumn(String theColumnName) {
super(theColumnName);
}
public void references(String theForeignTable, String theForeignColumn) {
AddForeignKeyTask task = new AddForeignKeyTask();
task.setTableName(myTableName);
task.setConstraintName(myForeignKeyName);
task.setColumnName(getColumnName());
task.setForeignTableName(theForeignTable);
task.setForeignColumnName(theForeignColumn);
addTask(task);
}
}
}
public static class BuilderAddColumnWithName {
private final String myColumnName;
private final IAcceptsTasks myTaskSink;
public BuilderAddColumnWithName(String theColumnName, IAcceptsTasks theTaskSink) {
myColumnName = theColumnName;
myTaskSink = theTaskSink;
}
public BuilderAddColumnWithNameNullable nullable() {
return new BuilderAddColumnWithNameNullable(true);
}
public BuilderAddColumnWithNameNullable nonNullable() {
return new BuilderAddColumnWithNameNullable(false);
}
public class BuilderAddColumnWithNameNullable {
private final boolean myNullable;
public BuilderAddColumnWithNameNullable(boolean theNullable) {
myNullable = theNullable;
}
public void type(AddColumnTask.ColumnTypeEnum theColumnType) {
type(theColumnType, null);
}
public void type(AddColumnTask.ColumnTypeEnum theColumnType, Integer theLength) {
AddColumnTask task = new AddColumnTask();
task.setColumnName(myColumnName);
task.setNullable(myNullable);
task.setColumnType(theColumnType);
if (theLength != null) {
task.setColumnLength(theLength);
}
myTaskSink.addTask(task);
}
}
}
}
}
}

View File

@ -0,0 +1,396 @@
package ca.uhn.fhir.jpa.migrate.tasks.api;
/*-
* #%L
* HAPI FHIR JPA Server - Migration
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.*;
import org.apache.commons.lang3.Validate;
import org.intellij.lang.annotations.Language;
public class Builder {
private final String myRelease;
private final BaseMigrationTasks.IAcceptsTasks mySink;
/**
 * @param theRelease release label used to namespace the Flyway versions of tasks built here
 * @param theSink    receives every task produced through this builder
 */
public Builder(String theRelease, BaseMigrationTasks.IAcceptsTasks theSink) {
	this.mySink = theSink;
	this.myRelease = theRelease;
}
/** Begins a group of migrations scoped to a single table. */
public BuilderWithTableName onTable(String theTableName) {
	final BuilderWithTableName tableBuilder = new BuilderWithTableName(myRelease, mySink, theTableName);
	return tableBuilder;
}
/** Forwards a single migration task to the configured sink. */
public void addTask(BaseTask<?> theTask) {
	this.mySink.addTask(theTask);
}
/** Starts a raw-SQL table-creation migration; the returned builder collects per-driver SQL. */
public BuilderAddTableRawSql addTableRawSql(String theVersion, String theTableName) {
	final BuilderAddTableRawSql rawSqlBuilder = new BuilderAddTableRawSql(theVersion, theTableName);
	return rawSqlBuilder;
}
/** Registers a migration that executes the given SQL on every driver; fluent. */
public Builder executeRawSql(String theVersion, @Language("SQL") String theSql) {
	ExecuteRawSqlTask task = new ExecuteRawSqlTask(myRelease, theVersion);
	task.addSql(theSql);
	mySink.addTask(task);
	return this;
}
/** Registers a task that initializes the schema from scratch via the given provider; fluent. */
public Builder initializeSchema(String theVersion, ISchemaInitializationProvider theSchemaInitializationProvider) {
	InitializeSchemaTask task = new InitializeSchemaTask(myRelease, theVersion, theSchemaInitializationProvider);
	mySink.addTask(task);
	return this;
}
/** Registers a migration that executes the given SQL on one specific driver only; fluent. */
public Builder executeRawSql(String theVersion, DriverTypeEnum theDriver, @Language("SQL") String theSql) {
	ExecuteRawSqlTask task = new ExecuteRawSqlTask(myRelease, theVersion);
	task.addSql(theDriver, theSql);
	mySink.addTask(task);
	return this;
}
// Flyway doesn't support these kinds of migrations
/**
 * Intentional no-op, retained only for source compatibility with the pre-Flyway builder API.
 *
 * @deprecated log-section markers are not representable as Flyway migrations; this call does nothing
 */
@Deprecated
public Builder startSectionWithMessage(String theMessage) {
// Do nothing
return this;
}
/** Starts a column-by-column table-creation migration for the given table and primary key. */
public BuilderAddTableByColumns addTableByColumns(String theVersion, String theTableName, String thePkColumnName) {
	final BuilderAddTableByColumns byColumnsBuilder = new BuilderAddTableByColumns(myRelease, theVersion, mySink, theTableName, thePkColumnName);
	return byColumnsBuilder;
}
/** Registers a migration that creates the named ID generator (sequence). */
public void addIdGenerator(String theVersion, String theGeneratorName) {
	addTask(new AddIdGeneratorTask(myRelease, theVersion, theGeneratorName));
}
/** Registers a migration that drops the named ID generator (sequence). */
public void dropIdGenerator(String theVersion, String theIdGeneratorName) {
	addTask(new DropIdGeneratorTask(myRelease, theVersion, theIdGeneratorName));
}
/**
 * Collects raw CREATE TABLE SQL (optionally per driver) into a single {@link AddTableRawSqlTask},
 * which is registered with the sink as soon as this builder is created.
 */
public class BuilderAddTableRawSql {
	private final AddTableRawSqlTask myTask;

	protected BuilderAddTableRawSql(String theVersion, String theTableName) {
		myTask = new AddTableRawSqlTask(myRelease, theVersion);
		myTask.setTableName(theTableName);
		addTask(myTask);
	}

	/** Adds SQL to run only on the given driver; fluent. */
	public BuilderAddTableRawSql addSql(DriverTypeEnum theDriverTypeEnum, @Language("SQL") String theSql) {
		myTask.addSql(theDriverTypeEnum, theSql);
		return this;
	}

	/**
	 * Adds SQL to run on every driver.
	 * FIX: now returns {@code this} for consistency with the driver-specific overload
	 * (source-compatible: existing callers that ignore the result are unaffected).
	 */
	public BuilderAddTableRawSql addSql(@Language("SQL") String theSql) {
		myTask.addSql(theSql);
		return this;
	}
}
/**
 * Builds a table column-by-column: column additions routed through {@link #addTask} are folded
 * into the single {@link AddTableByColumnTask} rather than being registered as separate tasks.
 */
public class BuilderAddTableByColumns extends BuilderWithTableName implements BaseMigrationTasks.IAcceptsTasks {
	private final String myVersion;
	private final AddTableByColumnTask myTask;

	public BuilderAddTableByColumns(String theRelease, String theVersion, BaseMigrationTasks.IAcceptsTasks theSink, String theTableName, String thePkColumnName) {
		super(theRelease, theSink, theTableName);
		myVersion = theVersion;
		myTask = new AddTableByColumnTask(myRelease, theVersion);
		myTask.setTableName(theTableName);
		myTask.setPkColumn(thePkColumnName);
		theSink.addTask(myTask);
	}

	/** Adds a column to the table being created (no separate version needed: it shares this task's). */
	public BuilderAddColumnWithName addColumn(String theColumnName) {
		return new BuilderAddColumnWithName(myRelease, myVersion, theColumnName, this);
	}

	@Override
	public void addTask(BaseTask<?> theTask) {
		// Column additions become part of the CREATE TABLE; everything else flows through normally
		if (theTask instanceof AddColumnTask) {
			myTask.addAddColumnTask((AddColumnTask) theTask);
			return;
		}
		super.addTask(theTask);
	}
}
/**
 * Fluent builder stage scoped to a single database table. Collects schema-migration
 * tasks (index, column, foreign-key and rename operations) for the table named at
 * construction time. Every task routed through {@link #addTask(BaseTask)} is stamped
 * with this builder's table name and forwarded to the wrapped sink.
 */
public static class BuilderWithTableName implements BaseMigrationTasks.IAcceptsTasks {
	// Release identifier stamped onto every task created by this builder
	private final String myRelease;
	// Downstream collector that ultimately receives the tasks
	private final BaseMigrationTasks.IAcceptsTasks mySink;
	// The table all tasks built here operate on
	private final String myTableName;

	public BuilderWithTableName(String theRelease, BaseMigrationTasks.IAcceptsTasks theSink, String theTableName) {
		myRelease = theRelease;
		mySink = theSink;
		myTableName = theTableName;
	}

	public String getTableName() {
		return myTableName;
	}

	/** Queue a task that drops the named index from this table. */
	public void dropIndex(String theVersion, String theIndexName) {
		DropIndexTask task = new DropIndexTask(myRelease, theVersion);
		task.setIndexName(theIndexName);
		task.setTableName(myTableName);
		addTask(task);
	}

	/** Queue a task that drops this entire table. */
	public void dropThisTable(String theVersion) {
		DropTableTask task = new DropTableTask(myRelease, theVersion);
		task.setTableName(myTableName);
		addTask(task);
	}

	/** Begin building an add-index task; complete it via unique(...).withColumns(...). */
	public BuilderWithTableName.BuilderAddIndexWithName addIndex(String theVersion, String theIndexName) {
		return new BuilderWithTableName.BuilderAddIndexWithName(theVersion, theIndexName);
	}

	/** Begin building an add-column task; complete it via nullable()/nonNullable().type(...). */
	public BuilderWithTableName.BuilderAddColumnWithName addColumn(String theVersion, String theColumnName) {
		return new BuilderWithTableName.BuilderAddColumnWithName(myRelease, theVersion, theColumnName, this);
	}

	/** Queue a task that drops the named column from this table. */
	public void dropColumn(String theVersion, String theColumnName) {
		Validate.notBlank(theColumnName);
		DropColumnTask task = new DropColumnTask(myRelease, theVersion);
		task.setTableName(myTableName);
		task.setColumnName(theColumnName);
		addTask(task);
	}

	@Override
	public void addTask(BaseTask<?> theTask) {
		// Stamp the task with this builder's table before forwarding to the sink.
		// All tasks created through this builder are table-scoped, hence the cast.
		((BaseTableTask<?>) theTask).setTableName(myTableName);
		mySink.addTask(theTask);
	}

	/** Begin building a modify-column task; complete it via nullable()/nonNullable().withType(...). */
	public BuilderWithTableName.BuilderModifyColumnWithName modifyColumn(String theVersion, String theColumnName) {
		return new BuilderWithTableName.BuilderModifyColumnWithName(theVersion, theColumnName);
	}

	/** Begin building an add-foreign-key task; complete it via toColumn(...).references(...). */
	public BuilderWithTableName.BuilderAddForeignKey addForeignKey(String theVersion, String theForeignKeyName) {
		return new BuilderWithTableName.BuilderAddForeignKey(theVersion, theForeignKeyName);
	}

	/** Rename a column, failing if neither column exists or if the target column holds data. */
	public BuilderWithTableName renameColumn(String theVersion, String theOldName, String theNewName) {
		return renameColumn(theVersion, theOldName, theNewName, false, false);
	}

	/**
	 * @param theVersion The migration version this task belongs to
	 * @param theOldName The old column name
	 * @param theNewName The new column name
	 * @param isOkayIfNeitherColumnExists Setting this to true means that it's not an error if neither column exists
	 * @param theDeleteTargetColumnFirstIfBothExist Setting this to true causes the migrator to be ok with the target column existing. It will make sure that there is no data in the column with the new name, then delete it if so in order to make room for the renamed column. If there is data it will still bomb out.
	 */
	public BuilderWithTableName renameColumn(String theVersion, String theOldName, String theNewName, boolean isOkayIfNeitherColumnExists, boolean theDeleteTargetColumnFirstIfBothExist) {
		RenameColumnTask task = new RenameColumnTask(myRelease, theVersion);
		task.setTableName(myTableName);
		task.setOldName(theOldName);
		task.setNewName(theNewName);
		task.setOkayIfNeitherColumnExists(isOkayIfNeitherColumnExists);
		task.setDeleteTargetColumnFirstIfBothExist(theDeleteTargetColumnFirstIfBothExist);
		addTask(task);
		return this;
	}

	/**
	 * @param theVersion The migration version this task belongs to
	 * @param theFkName the name of the foreign key
	 * @param theParentTableName the name of the table that exports the foreign key
	 */
	public void dropForeignKey(String theVersion, String theFkName, String theParentTableName) {
		DropForeignKeyTask task = new DropForeignKeyTask(myRelease, theVersion);
		task.setConstraintName(theFkName);
		task.setTableName(getTableName());
		task.setParentTableName(theParentTableName);
		addTask(task);
	}

	/** Intermediate builder stage holding the index name; next step chooses uniqueness. */
	public class BuilderAddIndexWithName {
		private final String myVersion;
		private final String myIndexName;

		public BuilderAddIndexWithName(String theVersion, String theIndexName) {
			myVersion = theVersion;
			myIndexName = theIndexName;
		}

		public BuilderWithTableName.BuilderAddIndexWithName.BuilderAddIndexUnique unique(boolean theUnique) {
			return new BuilderWithTableName.BuilderAddIndexWithName.BuilderAddIndexUnique(myVersion, theUnique);
		}

		/** Terminal stage: withColumns(...) creates and queues the AddIndexTask. */
		public class BuilderAddIndexUnique {
			private final String myVersion;
			private final boolean myUnique;

			public BuilderAddIndexUnique(String theVersion, boolean theUnique) {
				myVersion = theVersion;
				myUnique = theUnique;
			}

			public void withColumns(String... theColumnNames) {
				AddIndexTask task = new AddIndexTask(myRelease, myVersion);
				task.setTableName(myTableName);
				task.setIndexName(myIndexName);
				task.setUnique(myUnique);
				task.setColumns(theColumnNames);
				addTask(task);
			}
		}
	}

	/** Intermediate builder stage holding the column name; next step chooses nullability. */
	public class BuilderModifyColumnWithName {
		private final String myVersion;
		private final String myColumnName;

		public BuilderModifyColumnWithName(String theVersion, String theColumnName) {
			myVersion = theVersion;
			myColumnName = theColumnName;
		}

		public String getColumnName() {
			return myColumnName;
		}

		public BuilderWithTableName.BuilderModifyColumnWithName.BuilderModifyColumnWithNameAndNullable nullable() {
			return new BuilderWithTableName.BuilderModifyColumnWithName.BuilderModifyColumnWithNameAndNullable(myVersion, true);
		}

		public BuilderWithTableName.BuilderModifyColumnWithName.BuilderModifyColumnWithNameAndNullable nonNullable() {
			return new BuilderWithTableName.BuilderModifyColumnWithName.BuilderModifyColumnWithNameAndNullable(myVersion, false);
		}

		/** Terminal stage: withType(...) validates length rules, then queues the ModifyColumnTask. */
		public class BuilderModifyColumnWithNameAndNullable {
			private final String myVersion;
			private final boolean myNullable;

			public BuilderModifyColumnWithNameAndNullable(String theVersion, boolean theNullable) {
				myVersion = theVersion;
				myNullable = theNullable;
			}

			public void withType(BaseTableColumnTypeTask.ColumnTypeEnum theColumnType) {
				withType(theColumnType, null);
			}

			/**
			 * @param theLength Required (non-zero) for STRING columns, forbidden for all other types
			 * @throws IllegalArgumentException if the length rule above is violated
			 */
			public void withType(BaseTableColumnTypeTask.ColumnTypeEnum theColumnType, Integer theLength) {
				// STRING columns must declare a positive length; other types must not declare one
				if (theColumnType == BaseTableColumnTypeTask.ColumnTypeEnum.STRING) {
					if (theLength == null || theLength == 0) {
						throw new IllegalArgumentException("Can not specify length 0 for column of type " + theColumnType);
					}
				} else {
					if (theLength != null) {
						throw new IllegalArgumentException("Can not specify length for column of type " + theColumnType);
					}
				}
				ModifyColumnTask task = new ModifyColumnTask(myRelease, myVersion);
				task.setColumnName(myColumnName);
				task.setTableName(myTableName);
				if (theLength != null) {
					task.setColumnLength(theLength);
				}
				task.setNullable(myNullable);
				task.setColumnType(theColumnType);
				addTask(task);
			}
		}
	}

	/** Intermediate builder stage holding the FK name; toColumn(...) then references(...) completes it. */
	public class BuilderAddForeignKey {
		private final String myVersion;
		private final String myForeignKeyName;

		public BuilderAddForeignKey(String theVersion, String theForeignKeyName) {
			myVersion = theVersion;
			myForeignKeyName = theForeignKeyName;
		}

		public BuilderWithTableName.BuilderAddForeignKey.BuilderAddForeignKeyToColumn toColumn(String theColumnName) {
			return new BuilderWithTableName.BuilderAddForeignKey.BuilderAddForeignKeyToColumn(myVersion, theColumnName);
		}

		/** Terminal stage: references(...) creates and queues the AddForeignKeyTask. */
		public class BuilderAddForeignKeyToColumn extends BuilderWithTableName.BuilderModifyColumnWithName {
			public BuilderAddForeignKeyToColumn(String theVersion, String theColumnName) {
				super(theVersion, theColumnName);
			}

			public void references(String theForeignTable, String theForeignColumn) {
				AddForeignKeyTask task = new AddForeignKeyTask(myRelease, myVersion);
				task.setTableName(myTableName);
				task.setConstraintName(myForeignKeyName);
				task.setColumnName(getColumnName());
				task.setForeignTableName(theForeignTable);
				task.setForeignColumnName(theForeignColumn);
				addTask(task);
			}
		}
	}

	/**
	 * Intermediate builder stage for a new column. Static: it carries its own
	 * release/version and forwards the finished task to the supplied sink
	 * (typically the enclosing BuilderWithTableName, which applies the table name).
	 */
	public static class BuilderAddColumnWithName {
		private final String myRelease;
		private final String myVersion;
		private final String myColumnName;
		private final BaseMigrationTasks.IAcceptsTasks myTaskSink;

		public BuilderAddColumnWithName(String theRelease, String theVersion, String theColumnName, BaseMigrationTasks.IAcceptsTasks theTaskSink) {
			myRelease = theRelease;
			myVersion = theVersion;
			myColumnName = theColumnName;
			myTaskSink = theTaskSink;
		}

		public BuilderWithTableName.BuilderAddColumnWithName.BuilderAddColumnWithNameNullable nullable() {
			return new BuilderWithTableName.BuilderAddColumnWithName.BuilderAddColumnWithNameNullable(myRelease, myVersion, true);
		}

		public BuilderWithTableName.BuilderAddColumnWithName.BuilderAddColumnWithNameNullable nonNullable() {
			return new BuilderWithTableName.BuilderAddColumnWithName.BuilderAddColumnWithNameNullable(myRelease, myVersion, false);
		}

		/** Terminal stage: type(...) creates and queues the AddColumnTask. */
		public class BuilderAddColumnWithNameNullable {
			private final boolean myNullable;
			private final String myRelease;
			private final String myVersion;

			public BuilderAddColumnWithNameNullable(String theRelease, String theVersion, boolean theNullable) {
				myRelease = theRelease;
				myVersion = theVersion;
				myNullable = theNullable;
			}

			public void type(AddColumnTask.ColumnTypeEnum theColumnType) {
				type(theColumnType, null);
			}

			public void type(AddColumnTask.ColumnTypeEnum theColumnType, Integer theLength) {
				AddColumnTask task = new AddColumnTask(myRelease, myVersion);
				task.setColumnName(myColumnName);
				task.setNullable(myNullable);
				task.setColumnType(theColumnType);
				if (theLength != null) {
					task.setColumnLength(theLength);
				}
				// Table name is not set here; the sink's addTask applies it when the
				// sink is the enclosing BuilderWithTableName
				myTaskSink.addTask(task);
			}
		}
	}
}
}

View File

@ -0,0 +1,31 @@
package ca.uhn.fhir.jpa.migrate.tasks.api;
/*-
* #%L
* HAPI FHIR JPA Server - Migration
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import java.util.List;
/**
 * Supplies the raw DDL used to initialize an empty database schema in a single
 * step, as an alternative to replaying every incremental migration task.
 */
public interface ISchemaInitializationProvider {
	/**
	 * @param theDriverType the database platform the statements must target
	 * @return the ordered SQL statements that create the full schema
	 */
	List<String> getSqlStatements(DriverTypeEnum theDriverType);

	/**
	 * @return the name of a table whose presence indicates the schema has
	 * already been initialized
	 */
	String getSchemaExistsIndicatorTable();
}

View File

@ -0,0 +1,34 @@
package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.jpa.migrate.taskdef.AddTableRawSqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTest;
import org.junit.Test;
import java.util.Collections;
import java.util.Properties;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
/**
 * Verifies that SchemaMigrator.validate() rejects a database with pending
 * migrations, and accepts it once those migrations have been applied.
 */
public class SchemaMigratorTest extends BaseTest {

	@Test
	public void migrationRequired() {
		// Register one pending migration task (creates SOMETABLE)
		AddTableRawSqlTask task = new AddTableRawSqlTask("1", "1");
		task.setTableName("SOMETABLE");
		task.addSql(DriverTypeEnum.H2_EMBEDDED, "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
		getMigrator().addTask(task);
		SchemaMigrator schemaMigrator = new SchemaMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), Collections.singletonList(task));

		// Before the migration runs, validation must fail with a descriptive message
		try {
			schemaMigrator.validate();
			fail();
		} catch (ConfigurationException e) {
			assertEquals("The database schema for " + getUrl() + " is out of date.  Current database schema version is unknown.  Schema version required by application is " + task.getFlywayVersion() + ".  Please run the database migrator.", e.getMessage());
		}

		// After migrating, the same validation must pass
		getMigrator().migrate();

		schemaMigrator.validate();
	}
}

View File

@ -15,7 +15,7 @@ public class AddColumnTest extends BaseTest {
public void testColumnDoesntAlreadyExist() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
AddColumnTask task = new AddColumnTask();
AddColumnTask task = new AddColumnTask("1", "1");
task.setTableName("SOMETABLE");
task.setColumnName("newcol");
task.setColumnType(AddColumnTask.ColumnTypeEnum.LONG);
@ -31,7 +31,7 @@ public class AddColumnTest extends BaseTest {
public void testAddColumnInt() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
AddColumnTask task = new AddColumnTask();
AddColumnTask task = new AddColumnTask("1", "1");
task.setTableName("SOMETABLE");
task.setColumnName("newcolint");
task.setColumnType(AddColumnTask.ColumnTypeEnum.INT);
@ -48,7 +48,7 @@ public class AddColumnTest extends BaseTest {
public void testColumnAlreadyExists() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255), newcol bigint)");
AddColumnTask task = new AddColumnTask();
AddColumnTask task = new AddColumnTask("1", "1");
task.setTableName("SOMETABLE");
task.setColumnName("newcol");
task.setColumnType(AddColumnTask.ColumnTypeEnum.LONG);
@ -58,5 +58,4 @@ public class AddColumnTest extends BaseTest {
assertThat(JdbcUtils.getColumnNames(getConnectionProperties(), "SOMETABLE"), containsInAnyOrder("PID", "TEXTCOL", "NEWCOL"));
}
}

View File

@ -6,7 +6,6 @@ import org.junit.Test;
import java.sql.SQLException;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.assertThat;
@ -18,7 +17,7 @@ public class AddForeignKeyTaskTest extends BaseTest {
executeSql("create table FOREIGNTBL (PID bigint not null, HOMEREF bigint)");
assertThat(JdbcUtils.getForeignKeys(getConnectionProperties(), "HOME", "FOREIGNTBL"), empty());
AddForeignKeyTask task = new AddForeignKeyTask();
AddForeignKeyTask task = new AddForeignKeyTask("1", "1");
task.setTableName("FOREIGNTBL");
task.setColumnName("HOMEREF");
task.setConstraintName("FK_HOME_FOREIGN");

View File

@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
import ca.uhn.fhir.util.VersionEnum;
import org.junit.Test;
@ -18,15 +19,15 @@ public class AddIdGeneratorTaskTest extends BaseTest {
public void testAddIdGenerator() throws SQLException {
assertThat(JdbcUtils.getSequenceNames(getConnectionProperties()), empty());
MyMigrationTasks migrator = new MyMigrationTasks();
getMigrator().addTasks(migrator.getTasks(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
MyMigrationTasks migrationTasks = new MyMigrationTasks("123456.7");
getMigrator().addTasks(migrationTasks.getTasks(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
getMigrator().migrate();
assertThat(JdbcUtils.getSequenceNames(getConnectionProperties()), containsInAnyOrder("SEQ_FOO"));
// Second time, should produce no action
migrator = new MyMigrationTasks();
getMigrator().addTasks(migrator.getTasks(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
migrationTasks = new MyMigrationTasks("123456.8");
getMigrator().addTasks(migrationTasks.getTasks(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
getMigrator().migrate();
assertThat(JdbcUtils.getSequenceNames(getConnectionProperties()), containsInAnyOrder("SEQ_FOO"));
@ -37,9 +38,9 @@ public class AddIdGeneratorTaskTest extends BaseTest {
private static class MyMigrationTasks extends BaseMigrationTasks<VersionEnum> {
public MyMigrationTasks() {
public MyMigrationTasks(String theVersion) {
Builder v = forVersion(VersionEnum.V3_5_0);
v.addIdGenerator("SEQ_FOO");
v.addIdGenerator(theVersion, "SEQ_FOO");
}

View File

@ -16,7 +16,7 @@ public class AddIndexTest extends BaseTest {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
executeSql("ALTER TABLE SOMETABLE ADD CONSTRAINT IDX_ANINDEX UNIQUE(TEXTCOL)");
AddIndexTask task = new AddIndexTask();
AddIndexTask task = new AddIndexTask("1", "1");
task.setIndexName("IDX_ANINDEX");
task.setTableName("SOMETABLE");
task.setColumns("TEXTCOL");
@ -38,7 +38,7 @@ public class AddIndexTest extends BaseTest {
executeSql("create unique index IDX_ANINDEX on SOMETABLE (PID, TEXTCOL)");
executeSql("create unique index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");
AddIndexTask task = new AddIndexTask();
AddIndexTask task = new AddIndexTask("1", "1");
task.setIndexName("IDX_ANINDEX");
task.setTableName("SOMETABLE");
task.setColumns("PID", "TEXTCOL");
@ -59,7 +59,7 @@ public class AddIndexTest extends BaseTest {
executeSql("create index IDX_ANINDEX on SOMETABLE (PID, TEXTCOL)");
executeSql("create index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");
AddIndexTask task = new AddIndexTask();
AddIndexTask task = new AddIndexTask("1", "1");
task.setIndexName("IDX_ANINDEX");
task.setTableName("SOMETABLE");
task.setColumns("PID", "TEXTCOL");
@ -79,7 +79,7 @@ public class AddIndexTest extends BaseTest {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
executeSql("create unique index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");
AddIndexTask task = new AddIndexTask();
AddIndexTask task = new AddIndexTask("1", "1");
task.setIndexName("IDX_ANINDEX");
task.setTableName("SOMETABLE");
task.setColumns("PID", "TEXTCOL");

View File

@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
import ca.uhn.fhir.util.VersionEnum;
import org.junit.Test;
@ -27,18 +28,16 @@ public class AddTableByColumnTaskTest extends BaseTest {
public MyMigrationTasks() {
Builder v = forVersion(VersionEnum.V3_5_0);
Builder.BuilderWithTableName targetTable = v.addTableByColumns("TGT_TABLE", "PID");
targetTable.addColumn("PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
Builder.BuilderWithTableName targetTable = v.addTableByColumns("1", "TGT_TABLE", "PID");
targetTable.addColumn("2", "PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
Builder.BuilderAddTableByColumns fooTable = v.addTableByColumns("FOO_TABLE", "PID");
Builder.BuilderAddTableByColumns fooTable = v.addTableByColumns("3", "FOO_TABLE", "PID");
fooTable.addColumn("PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
fooTable.addColumn("HELLO").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
fooTable.addColumn("COL_REF").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
fooTable.addIndex("IDX_HELLO").unique(true).withColumns("HELLO");
fooTable.addForeignKey("FK_REF").toColumn("COL_REF").references("TGT_TABLE", "PID");
fooTable.addIndex("4", "IDX_HELLO").unique(true).withColumns("HELLO");
fooTable.addForeignKey("5", "FK_REF").toColumn("COL_REF").references("TGT_TABLE", "PID");
}
}
}

View File

@ -14,7 +14,7 @@ public class AddTableTest extends BaseTest {
@Test
public void testTableDoesntAlreadyExist() throws SQLException {
AddTableRawSqlTask task = new AddTableRawSqlTask();
AddTableRawSqlTask task = new AddTableRawSqlTask("1", "1");
task.setTableName("SOMETABLE");
task.addSql(DriverTypeEnum.H2_EMBEDDED, "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
getMigrator().addTask(task);
@ -29,11 +29,10 @@ public class AddTableTest extends BaseTest {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), containsInAnyOrder("SOMETABLE"));
AddTableRawSqlTask task = new AddTableRawSqlTask();
AddTableRawSqlTask task = new AddTableRawSqlTask("1", "1");
task.setTableName("SOMETABLE");
task.addSql(DriverTypeEnum.H2_EMBEDDED, "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
getMigrator().addTask(task);
getMigrator().migrate();
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), containsInAnyOrder("SOMETABLE"));

View File

@ -22,7 +22,7 @@ public class ArbitrarySqlTaskTest extends BaseTest {
executeSql("insert into HFJ_RES_PARAM_PRESENT (PID, SP_ID, SP_PRESENT, HASH_PRESENT) values (100, 1, true, null)");
executeSql("insert into HFJ_RES_PARAM_PRESENT (PID, SP_ID, SP_PRESENT, HASH_PRESENT) values (101, 2, true, null)");
ArbitrarySqlTask task = new ArbitrarySqlTask("HFJ_RES_PARAM_PRESENT", "Consolidate search parameter presence indexes");
ArbitrarySqlTask task = new ArbitrarySqlTask(VersionEnum.V3_5_0, "1", "HFJ_RES_PARAM_PRESENT", "Consolidate search parameter presence indexes");
task.setExecuteOnlyIfTableExists("hfj_search_parm");
task.setBatchSize(1);
String sql = "SELECT " +
@ -56,7 +56,7 @@ public class ArbitrarySqlTaskTest extends BaseTest {
@Test
public void testExecuteOnlyIfTableExists() {
ArbitrarySqlTask task = new ArbitrarySqlTask("HFJ_RES_PARAM_PRESENT", "Consolidate search parameter presence indexes");
ArbitrarySqlTask task = new ArbitrarySqlTask(VersionEnum.V3_5_0, "1", "HFJ_RES_PARAM_PRESENT", "Consolidate search parameter presence indexes");
task.setBatchSize(1);
String sql = "SELECT * FROM HFJ_SEARCH_PARM";
task.addQuery(sql, ArbitrarySqlTask.QueryModeEnum.BATCH_UNTIL_NO_MORE, t -> {
@ -82,7 +82,7 @@ public class ArbitrarySqlTaskTest extends BaseTest {
};
migrator
.forVersion(VersionEnum.V3_5_0)
.addTableRawSql("A")
.addTableRawSql("1", "A")
.addSql("delete from TEST_UPDATE_TASK where RES_TYPE = 'Patient'");
getMigrator().addTasks(migrator.getTasks(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
@ -106,8 +106,8 @@ public class ArbitrarySqlTaskTest extends BaseTest {
};
migrator
.forVersion(VersionEnum.V3_5_0)
.executeRawSql(DriverTypeEnum.H2_EMBEDDED, "delete from TEST_UPDATE_TASK where RES_TYPE = 'Patient'")
.executeRawSql(DriverTypeEnum.H2_EMBEDDED, "delete from TEST_UPDATE_TASK where RES_TYPE = 'Encounter'");
.executeRawSql("1", DriverTypeEnum.H2_EMBEDDED, "delete from TEST_UPDATE_TASK where RES_TYPE = 'Patient'")
.executeRawSql("2", DriverTypeEnum.H2_EMBEDDED, "delete from TEST_UPDATE_TASK where RES_TYPE = 'Encounter'");
getMigrator().addTasks(migrator.getTasks(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
getMigrator().migrate();

View File

@ -0,0 +1,50 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
/**
 * Unit tests for BaseTask.validateVersion(): a well-formed version passes, and
 * malformed versions (too short, missing period, too many periods) are rejected
 * with an IllegalStateException naming the expected pattern.
 */
public class BaseTaskTest {

	@Test
	public void testValidateVersionCorrect() {
		new DropTableTask("1", "12345678.9").validateVersion();
	}

	@Test
	public void testValidateVersionShort() {
		expectInvalidVersion("123.4");
	}

	@Test
	public void testValidateVersionNoPeriod() {
		expectInvalidVersion("123456789");
	}

	@Test
	public void testValidateVersionTooManyPeriods() {
		expectInvalidVersion("12345678.9.1");
	}

	/** Asserts that validateVersion() rejects theBadVersion with the standard message. */
	private void expectInvalidVersion(String theBadVersion) {
		DropTableTask task = new DropTableTask("1", theBadVersion);
		try {
			task.validateVersion();
			fail();
		} catch (IllegalStateException e) {
			assertEquals("The version " + theBadVersion + " does not match the expected pattern " + BaseTask.MIGRATION_VERSION_PATTERN, e.getMessage());
		}
	}
}

View File

@ -1,21 +1,28 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.Migrator;
import ca.uhn.fhir.jpa.migrate.FlywayMigrator;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import org.apache.commons.dbcp2.BasicDataSource;
import org.intellij.lang.annotations.Language;
import org.junit.After;
import org.junit.Before;
import org.springframework.jdbc.core.ColumnMapRowMapper;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class BaseTest {
private static final String DATABASE_NAME = "DATABASE";
private static int ourDatabaseUrl = 0;
private String myUrl;
private Migrator myMigrator;
private FlywayMigrator myMigrator;
private DriverTypeEnum.ConnectionProperties myConnectionProperties;
private final BasicDataSource myDataSource = new BasicDataSource();
public String getUrl() {
return myUrl;
@ -25,6 +32,30 @@ public class BaseTest {
return myConnectionProperties;
}
@Before()
public void before() {
org.h2.Driver.class.toString();
myUrl = "jdbc:h2:mem:" + DATABASE_NAME + ourDatabaseUrl++;
myConnectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(myUrl, "SA", "SA");
myDataSource.setUrl(myUrl);
myDataSource.setUsername("SA");
myDataSource.setPassword("SA");
myDataSource.setDriverClassName(DriverTypeEnum.H2_EMBEDDED.getDriverClassName());
myMigrator = new FlywayMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, myDataSource);
}
protected BasicDataSource getDataSource() {
return myDataSource;
}
@After
public void resetMigrationVersion() throws SQLException {
Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
if (tableNames.contains(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME)) {
executeSql("DELETE from " + SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME + " where \"installed_rank\" > 0");
}
}
protected void executeSql(@Language("SQL") String theSql, Object... theArgs) {
myConnectionProperties.getTxTemplate().execute(t -> {
@ -39,7 +70,7 @@ public class BaseTest {
});
}
public Migrator getMigrator() {
public FlywayMigrator getMigrator() {
return myMigrator;
}
@ -48,19 +79,5 @@ public class BaseTest {
myConnectionProperties.close();
}
@Before()
public void before() {
org.h2.Driver.class.toString();
myUrl = "jdbc:h2:mem:database" + (ourDatabaseUrl++);
myConnectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(myUrl, "SA", "SA");
myMigrator = new Migrator();
myMigrator.setConnectionUrl(myUrl);
myMigrator.setDriverType(DriverTypeEnum.H2_EMBEDDED);
myMigrator.setUsername("SA");
myMigrator.setPassword("SA");
}
}

View File

@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.util.VersionEnum;
import org.junit.Test;
import org.springframework.jdbc.core.JdbcTemplate;
@ -17,7 +18,7 @@ public class CalculateHashesTest extends BaseTest {
executeSql("insert into HFJ_SPIDX_TOKEN (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_SYSTEM, SP_VALUE, SP_ID) values (false, 'identifier', 999, 'Patient', '2018-09-03 07:44:49.196', 'urn:oid:1.2.410.100110.10.41308301', '88888888', 1)");
executeSql("insert into HFJ_SPIDX_TOKEN (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_SYSTEM, SP_VALUE, SP_ID) values (false, 'identifier', 999, 'Patient', '2018-09-03 07:44:49.196', 'urn:oid:1.2.410.100110.10.41308301', '99999999', 2)");
CalculateHashesTask task = new CalculateHashesTask();
CalculateHashesTask task = new CalculateHashesTask(VersionEnum.V3_5_0, "1");
task.setTableName("HFJ_SPIDX_TOKEN");
task.setColumnName("HASH_IDENTITY");
task.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")));
@ -64,7 +65,7 @@ public class CalculateHashesTest extends BaseTest {
});
assertEquals(777L, count.longValue());
CalculateHashesTask task = new CalculateHashesTask();
CalculateHashesTask task = new CalculateHashesTask(VersionEnum.V3_5_0, "1");
task.setTableName("HFJ_SPIDX_TOKEN");
task.setColumnName("HASH_IDENTITY");
task.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")));

View File

@ -14,7 +14,7 @@ public class DropColumnTest extends BaseTest {
public void testDropColumn() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
DropColumnTask task = new DropColumnTask();
DropColumnTask task = new DropColumnTask("1", "1");
task.setTableName("SOMETABLE");
task.setColumnName("TEXTCOL");
getMigrator().addTask(task);

View File

@ -19,7 +19,7 @@ public class DropForeignKeyTaskTest extends BaseTest {
assertThat(JdbcUtils.getForeignKeys(getConnectionProperties(), "PARENT", "CHILD"), hasSize(1));
DropForeignKeyTask task = new DropForeignKeyTask();
DropForeignKeyTask task = new DropForeignKeyTask("1", "1");
task.setTableName("CHILD");
task.setParentTableName("PARENT");
task.setConstraintName("FK_MOM");

View File

@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
import ca.uhn.fhir.util.VersionEnum;
import org.junit.Test;
@ -32,7 +33,7 @@ public class DropIdGeneratorTaskTest extends BaseTest {
public MyMigrationTasks() {
Builder v = forVersion(VersionEnum.V3_5_0);
v.dropIdGenerator("SEQ_FOO");
v.dropIdGenerator("1", "SEQ_FOO");
}

View File

@ -16,7 +16,7 @@ public class DropIndexTest extends BaseTest {
executeSql("create index IDX_ANINDEX on SOMETABLE (PID, TEXTCOL)");
executeSql("create index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");
DropIndexTask task = new DropIndexTask();
DropIndexTask task = new DropIndexTask("1", "1");
task.setDescription("Drop an index");
task.setIndexName("IDX_ANINDEX");
task.setTableName("SOMETABLE");
@ -32,7 +32,7 @@ public class DropIndexTest extends BaseTest {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
executeSql("create index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");
DropIndexTask task = new DropIndexTask();
DropIndexTask task = new DropIndexTask("1", "1");
task.setDescription("Drop an index");
task.setIndexName("IDX_ANINDEX");
task.setTableName("SOMETABLE");
@ -50,7 +50,7 @@ public class DropIndexTest extends BaseTest {
executeSql("create index IDX_ANINDEX on SOMETABLE (PID, TEXTCOL)");
executeSql("create index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");
DropIndexTask task = new DropIndexTask();
DropIndexTask task = new DropIndexTask("1", "1");
task.setDescription("Drop an index");
task.setIndexName("IDX_ANINDEX");
task.setTableName("SOMETABLE");
@ -66,7 +66,7 @@ public class DropIndexTest extends BaseTest {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
executeSql("create index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");
DropIndexTask task = new DropIndexTask();
DropIndexTask task = new DropIndexTask("1", "1");
task.setDescription("Drop an index");
task.setIndexName("IDX_ANINDEX");
task.setTableName("SOMETABLE");

View File

@ -5,8 +5,7 @@ import org.junit.Test;
import java.sql.SQLException;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.core.IsNot.not;
import static org.junit.Assert.assertThat;
@ -18,7 +17,7 @@ public class DropTableTest extends BaseTest {
executeSql("create index IDX_ANINDEX on SOMETABLE (PID, TEXTCOL)");
executeSql("create index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");
DropTableTask task = new DropTableTask();
DropTableTask task = new DropTableTask("1", "1");
task.setTableName("SOMETABLE");
getMigrator().addTask(task);
@ -35,7 +34,7 @@ public class DropTableTest extends BaseTest {
executeSql("create table SOMETABLE (PID bigint not null, REMOTEPID bigint not null, primary key (PID))");
executeSql("alter table SOMETABLE add constraint FK_MYFK foreign key (REMOTEPID) references FOREIGNTABLE;");
DropTableTask task = new DropTableTask();
DropTableTask task = new DropTableTask("1", "1");
task.setTableName("SOMETABLE");
getMigrator().addTask(task);
@ -49,7 +48,7 @@ public class DropTableTest extends BaseTest {
@Test
public void testDropNonExistingTable() throws SQLException {
DropTableTask task = new DropTableTask();
DropTableTask task = new DropTableTask("1", "1");
task.setTableName("SOMETABLE");
getMigrator().addTask(task);
@ -58,4 +57,22 @@ public class DropTableTest extends BaseTest {
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), not(hasItems("SOMETABLE")));
}
@Test
public void testFlywayGetMigrationInfo() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
DropTableTask task = new DropTableTask("1", "1");
task.setTableName("SOMETABLE");
getMigrator().addTask(task);
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), (hasItems("SOMETABLE")));
assertThat(getMigrator().getMigrationInfo().pending().length, greaterThan(0));
getMigrator().migrate();
assertThat(getMigrator().getMigrationInfo().pending().length, equalTo(0));
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), not(hasItems("SOMETABLE")));
}
}

View File

@ -0,0 +1,51 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
/**
 * Verifies that migration task hash codes are stable (identically-built tasks hash
 * equally) and that the full HAPI FHIR task list has unique flyway versions whose
 * hashes are reproducible across independently-constructed task lists.
 */
public class HashTest {

	@Test
	public void testHash() {
		// Two independently built but identical tasks must hash identically
		AddColumnTask task1 = buildTask();
		AddColumnTask task2 = buildTask();
		assertEquals(task1.hashCode(), task2.hashCode());
	}

	/** Builds a representative AddColumnTask with fixed configuration. */
	private AddColumnTask buildTask() {
		AddColumnTask task = new AddColumnTask("1", "1");
		task.setTableName("TRM_CODESYSTEM_VER");
		task.setColumnName("CS_VERSION_ID");
		task.setNullable(true);
		task.setColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING);
		task.setColumnLength(255);
		return task;
	}

	@Test
	public void testCheckAllHashes() {
		// First pass: record each task's hash, failing on duplicate flyway versions
		List<BaseTask<?>> tasks1 = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getAllTasks(VersionEnum.values());
		Map<String, Integer> hashesByVersion = new HashMap<>();
		// Use the wildcard-parameterized type rather than the raw BaseTask to avoid
		// unchecked raw-type usage
		for (BaseTask<?> task : tasks1) {
			String version = task.getFlywayVersion();
			assertNull("Duplicate flyway version " + version + " in " + HapiFhirJpaMigrationTasks.class.getName(), hashesByVersion.get(version));
			hashesByVersion.put(version, task.hashCode());
		}

		// Second pass: a freshly constructed task list must reproduce every hash
		List<BaseTask<?>> tasks2 = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getAllTasks(VersionEnum.values());
		for (BaseTask<?> task : tasks2) {
			String version = task.getFlywayVersion();
			int origHash = hashesByVersion.get(version);
			assertEquals("Hashes differ for task " + version, origHash, task.hashCode());
		}
	}
}

View File

@ -0,0 +1,66 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.tasks.api.ISchemaInitializationProvider;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.junit.Test;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertThat;

/**
 * Tests that {@link InitializeSchemaTask} is idempotent: running the same schema
 * initialization twice must not fail.
 */
public class InitializeSchemaTaskTest extends BaseTest {
	@Test
	public void testInitializeTwice() throws SQLException {
		// First migration creates the schema
		InitializeSchemaTask task = new InitializeSchemaTask("1", "1", new TestProvider());
		getMigrator().addTask(task);
		getMigrator().migrate();
		assertThat(JdbcUtils.getTableNames(getConnectionProperties()), containsInAnyOrder("SOMETABLE"));
		// Second migrate with an identical task runs without issue
		getMigrator().removeAllTasksForUnitTest();
		InitializeSchemaTask identicalTask = new InitializeSchemaTask("1", "1", new TestProvider());
		getMigrator().addTask(identicalTask);
		getMigrator().migrate();
	}
	/**
	 * Minimal fixture provider that creates a single table. Declared static because it
	 * uses no state from the enclosing test, so there is no reason to capture a hidden
	 * reference to the outer instance.
	 */
	private static class TestProvider implements ISchemaInitializationProvider {
		@Override
		public List<String> getSqlStatements(DriverTypeEnum theDriverType) {
			return Collections.singletonList("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
		}
		@Override
		public String getSchemaExistsIndicatorTable() {
			// Names a table that never exists so the "schema already present" shortcut
			// is not taken during these tests
			return "DONT_MATCH_ME";
		}
		// Two providers are considered equal when they emit the same number of statements.
		// This could be stricter, but we don't want this to be brittle.
		@Override
		public boolean equals(Object theO) {
			if (this == theO) return true;
			if (theO == null || getClass() != theO.getClass()) return false;
			TestProvider that = (TestProvider) theO;
			return size() == that.size();
		}
		@Override
		public int hashCode() {
			return new HashCodeBuilder(17, 37)
				.append(size())
				.toHashCode();
		}
		private int size() {
			return getSqlStatements(DriverTypeEnum.H2_EMBEDDED).size();
		}
	}
}

View File

@ -13,7 +13,7 @@ public class ModifyColumnTest extends BaseTest {
public void testColumnWithJdbcTypeClob() throws SQLException {
executeSql("create table SOMETABLE (TEXTCOL clob)");
ModifyColumnTask task = new ModifyColumnTask();
ModifyColumnTask task = new ModifyColumnTask("1", "1");
task.setTableName("SOMETABLE");
task.setColumnName("TEXTCOL");
task.setColumnType(AddColumnTask.ColumnTypeEnum.STRING);
@ -36,7 +36,7 @@ public class ModifyColumnTest extends BaseTest {
public void testColumnAlreadyExists() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255), newcol bigint)");
ModifyColumnTask task = new ModifyColumnTask();
ModifyColumnTask task = new ModifyColumnTask("1", "1");
task.setTableName("SOMETABLE");
task.setColumnName("TEXTCOL");
task.setColumnType(AddColumnTask.ColumnTypeEnum.STRING);
@ -59,7 +59,7 @@ public class ModifyColumnTest extends BaseTest {
public void testNoShrink_SameNullable() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255), newcol bigint)");
ModifyColumnTask task = new ModifyColumnTask();
ModifyColumnTask task = new ModifyColumnTask("1", "123456.7");
task.setTableName("SOMETABLE");
task.setColumnName("TEXTCOL");
task.setColumnType(AddColumnTask.ColumnTypeEnum.STRING);
@ -88,7 +88,7 @@ public class ModifyColumnTest extends BaseTest {
assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 255), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
// PID
ModifyColumnTask task = new ModifyColumnTask();
ModifyColumnTask task = new ModifyColumnTask("1", "1");
task.setTableName("SOMETABLE");
task.setColumnName("PID");
task.setColumnType(AddColumnTask.ColumnTypeEnum.LONG);
@ -96,7 +96,7 @@ public class ModifyColumnTest extends BaseTest {
getMigrator().addTask(task);
// STRING
task = new ModifyColumnTask();
task = new ModifyColumnTask("1", "2");
task.setTableName("SOMETABLE");
task.setColumnName("TEXTCOL");
task.setColumnType(AddColumnTask.ColumnTypeEnum.STRING);
@ -130,7 +130,7 @@ public class ModifyColumnTest extends BaseTest {
getMigrator().setNoColumnShrink(true);
// PID
ModifyColumnTask task = new ModifyColumnTask();
ModifyColumnTask task = new ModifyColumnTask("1", "1");
task.setTableName("SOMETABLE");
task.setColumnName("PID");
task.setColumnType(AddColumnTask.ColumnTypeEnum.LONG);
@ -138,7 +138,7 @@ public class ModifyColumnTest extends BaseTest {
getMigrator().addTask(task);
// STRING
task = new ModifyColumnTask();
task = new ModifyColumnTask("1", "2");
task.setTableName("SOMETABLE");
task.setColumnName("DATECOL");
task.setColumnType(AddColumnTask.ColumnTypeEnum.DATE_TIMESTAMP);
@ -167,7 +167,7 @@ public class ModifyColumnTest extends BaseTest {
assertEquals(new JdbcUtils.ColumnType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 255), JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL"));
// PID
ModifyColumnTask task = new ModifyColumnTask();
ModifyColumnTask task = new ModifyColumnTask("1", "1");
task.setTableName("SOMETABLE");
task.setColumnName("PID");
task.setColumnType(AddColumnTask.ColumnTypeEnum.LONG);
@ -175,7 +175,7 @@ public class ModifyColumnTest extends BaseTest {
getMigrator().addTask(task);
// STRING
task = new ModifyColumnTask();
task = new ModifyColumnTask("1", "2");
task.setTableName("SOMETABLE");
task.setColumnName("TEXTCOL");
task.setColumnType(AddColumnTask.ColumnTypeEnum.STRING);
@ -201,7 +201,7 @@ public class ModifyColumnTest extends BaseTest {
public void testColumnDoesntAlreadyExist() throws SQLException {
executeSql("create table SOMETABLE (PID bigint, TEXTCOL varchar(255))");
ModifyColumnTask task = new ModifyColumnTask();
ModifyColumnTask task = new ModifyColumnTask("1", "1");
task.setTableName("SOMETABLE");
task.setColumnName("SOMECOLUMN");
task.setDescription("Make nullable");

View File

@ -1,13 +1,12 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.flywaydb.core.api.FlywayException;
import org.junit.Test;
import java.sql.SQLException;
import java.util.Set;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.*;
@ -17,7 +16,7 @@ public class RenameColumnTaskTest extends BaseTest {
public void testColumnAlreadyExists() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
RenameColumnTask task = new RenameColumnTask();
RenameColumnTask task = new RenameColumnTask("1", "1");
task.setTableName("SOMETABLE");
task.setDescription("Drop an index");
task.setOldName("myTextCol");
@ -33,7 +32,7 @@ public class RenameColumnTaskTest extends BaseTest {
public void testBothExistDeleteTargetFirst() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255), myTextCol varchar(255))");
RenameColumnTask task = new RenameColumnTask();
RenameColumnTask task = new RenameColumnTask("1", "1");
task.setTableName("SOMETABLE");
task.setDescription("Drop an index");
task.setOldName("myTextCol");
@ -52,7 +51,7 @@ public class RenameColumnTaskTest extends BaseTest {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255), myTextCol varchar(255))");
executeSql("INSERT INTO SOMETABLE (PID, TEXTCOL, myTextCol) VALUES (123, 'AAA', 'BBB')");
RenameColumnTask task = new RenameColumnTask();
RenameColumnTask task = new RenameColumnTask("1", "1");
task.setTableName("SOMETABLE");
task.setDescription("Drop an index");
task.setOldName("myTextCol");
@ -63,8 +62,8 @@ public class RenameColumnTaskTest extends BaseTest {
try {
getMigrator().migrate();
fail();
} catch (InternalErrorException e) {
assertEquals("Failure executing task \"Drop an index\", aborting! Cause: java.sql.SQLException: Can not rename SOMETABLE.myTextCol to TEXTCOL because both columns exist and data exists in TEXTCOL", e.getMessage());
} catch (FlywayException e) {
assertEquals("Failure executing task \"Drop an index\", aborting! Cause: java.sql.SQLException: Can not rename SOMETABLE.myTextCol to TEXTCOL because both columns exist and data exists in TEXTCOL", e.getCause().getCause().getMessage());
}
}
@ -73,7 +72,7 @@ public class RenameColumnTaskTest extends BaseTest {
public void testColumnDoesntAlreadyExist() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, myTextCol varchar(255))");
RenameColumnTask task = new RenameColumnTask();
RenameColumnTask task = new RenameColumnTask("1", "1");
task.setTableName("SOMETABLE");
task.setDescription("Drop an index");
task.setOldName("myTextCol");
@ -89,7 +88,7 @@ public class RenameColumnTaskTest extends BaseTest {
public void testNeitherColumnExists() {
executeSql("create table SOMETABLE (PID bigint not null)");
RenameColumnTask task = new RenameColumnTask();
RenameColumnTask task = new RenameColumnTask("1", "1");
task.setTableName("SOMETABLE");
task.setOldName("myTextCol");
task.setNewName("TEXTCOL");
@ -98,8 +97,8 @@ public class RenameColumnTaskTest extends BaseTest {
try {
getMigrator().migrate();
fail();
} catch (InternalErrorException e) {
assertEquals("Failure executing task \"RenameColumnTask\", aborting! Cause: java.sql.SQLException: Can not rename SOMETABLE.myTextCol to TEXTCOL because neither column exists!", e.getMessage());
} catch (FlywayException e) {
assertEquals("Failure executing task \"RenameColumnTask\", aborting! Cause: java.sql.SQLException: Can not rename SOMETABLE.myTextCol to TEXTCOL because neither column exists!", e.getCause().getCause().getMessage());
}
@ -109,11 +108,11 @@ public class RenameColumnTaskTest extends BaseTest {
public void testNeitherColumnExistsButAllowed() {
executeSql("create table SOMETABLE (PID bigint not null)");
RenameColumnTask task = new RenameColumnTask();
RenameColumnTask task = new RenameColumnTask("1", "1");
task.setTableName("SOMETABLE");
task.setOldName("myTextCol");
task.setNewName("TEXTCOL");
task.setAllowNeitherColumnToExist(true);
task.setOkayIfNeitherColumnExists(true);
getMigrator().addTask(task);
getMigrator().migrate();
@ -123,7 +122,7 @@ public class RenameColumnTaskTest extends BaseTest {
public void testBothColumnsExist() {
executeSql("create table SOMETABLE (PID bigint not null, PID2 bigint)");
RenameColumnTask task = new RenameColumnTask();
RenameColumnTask task = new RenameColumnTask("1", "1");
task.setTableName("SOMETABLE");
task.setOldName("PID");
task.setNewName("PID2");
@ -132,8 +131,8 @@ public class RenameColumnTaskTest extends BaseTest {
try {
getMigrator().migrate();
fail();
} catch (InternalErrorException e) {
assertEquals("Failure executing task \"RenameColumnTask\", aborting! Cause: java.sql.SQLException: Can not rename SOMETABLE.PID to PID2 because both columns exist!", e.getMessage());
} catch (FlywayException e) {
assertEquals("Failure executing task \"RenameColumnTask\", aborting! Cause: java.sql.SQLException: Can not rename SOMETABLE.PID to PID2 because both columns exist!", e.getCause().getCause().getMessage());
}

View File

@ -10,5 +10,4 @@ public class HapiFhirJpaMigrationTasksTest {
public void testCreate() {
new HapiFhirJpaMigrationTasks(Collections.emptySet());
}
}

View File

@ -0,0 +1,68 @@
package ca.uhn.fhir.jpa.migrate.tasks.api;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.DropTableTask;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

/**
 * Verifies that {@link BaseMigrationTasks#validate} enforces strictly increasing
 * migration version numbers.
 */
public class BaseMigrationTasksTest {
	static class MyMigrationTasks extends BaseMigrationTasks {
	}

	@Test
	public void testValidateCorrectOrder() {
		// Strictly increasing point versions validate cleanly
		new MyMigrationTasks().validate(tasksWithVersions("20191029.1", "20191029.2"));
	}

	@Test
	public void testValidateVersionWrongOrder() {
		assertValidationFails(
			tasksWithVersions("20191029.2", "20191029.1"),
			"Migration version 1.20191029.1 found after migration version 1.20191029.2. Migrations need to be in order by version number.");
	}

	@Test
	public void testValidateSameVersion() {
		// Equal versions are rejected just like out-of-order ones
		assertValidationFails(
			tasksWithVersions("20191029.1", "20191029.1"),
			"Migration version 1.20191029.1 found after migration version 1.20191029.1. Migrations need to be in order by version number.");
	}

	@Test
	public void testValidateWrongDateOrder() {
		assertValidationFails(
			tasksWithVersions("20191029.1", "20191028.1"),
			"Migration version 1.20191028.1 found after migration version 1.20191029.1. Migrations need to be in order by version number.");
	}

	// Builds a task list with release "1" and the given point versions, in the given order.
	private List<BaseTask<?>> tasksWithVersions(String... theVersions) {
		List<BaseTask<?>> retVal = new ArrayList<>();
		for (String next : theVersions) {
			retVal.add(new DropTableTask("1", next));
		}
		return retVal;
	}

	// Runs validate() and asserts it rejects the list with exactly the expected message.
	private void assertValidationFails(List<BaseTask<?>> theTasks, String theExpectedMessage) {
		try {
			new MyMigrationTasks().validate(theTasks);
			fail();
		} catch (IllegalStateException e) {
			assertEquals(theExpectedMessage, e.getMessage());
		}
	}
}

View File

@ -619,6 +619,7 @@
<!-- 9.4.17 seems to have issues -->
<jetty_version>9.4.23.v20191118</jetty_version>
<jsr305_version>3.0.2</jsr305_version>
<flyway_version>6.0.8</flyway_version>
<!--<hibernate_version>5.2.10.Final</hibernate_version>-->
<hibernate_version>5.4.6.Final</hibernate_version>
<!-- Update lucene version when you update hibernate-search version -->
@ -1473,6 +1474,11 @@
<artifactId>xpp3_xpath</artifactId>
<version>1.1.4c</version>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId>
<version>${flyway_version}</version>
</dependency>
</dependencies>
</dependencyManagement>

View File

@ -20,8 +20,8 @@
As of FHIR R4, some fields that were previously of type reference are now of type canonical.
One example is QuestionnaireResponse.questionnaire. Technically this means that this field
should no longer contain a relative reference, but as they are sometimes used that way, HAPI
FHIR will now try to be permissive and will index relative link canonical fields
such as (Questionnaire/123) as though it actually was a local relative link. Thanks to
FHIR will now try to be permissive and will index relative link canonical fields
such as (Questionnaire/123) as though it actually was a local relative link. Thanks to
Dean Atchley for reporting and providing a test case!
</action>
<action type="fix">
@ -29,9 +29,9 @@
in the JPA server. This has been corrected.
</action>
<action type="change">
When parsing Bundle resources containing other resources, XML/JSON parsers have an option called
When parsing Bundle resources containing other resources, XML/JSON parsers have an option called
"override resource ID with bundle entry fullUrl". This option previously caused any value
found in Bundle.entry.fullUrl to override any value found in
found in Bundle.entry.fullUrl to override any value found in
Bundle.entry.resource.id (meaning that the parsed resource would take its ID from
the fullUrl even if that ID disagreed with the ID found in the resource itself. As of
HAPI FHIR 4.1.0 the value in Bundle.entry.fullUrl will only be used to set the parsed resource
@ -41,6 +41,11 @@
Chained searches using the _has search parameter as the chain value are now supported by
the JPA server.
</action>
<action type="change">
Changed database migration to use Flyway. This adds a new table called FLY_HFJ_MIGRATION that records all
database migration tasks that have already been applied. The hapi-fhir-cli migrate tool has been changed
to use Flyway. Learn more about Flyway here: https://flywaydb.org/.
</action>
</release>
<release version="4.1.0" date="2019-11-13" description="Jitterbug">
<action type="add">