Merge remote-tracking branch 'remotes/origin/master' into ks-20191119-scheduler
commit 66cc83464f
BaseFlywayMigrateDatabaseCommand.java
@@ -20,8 +20,11 @@ package ca.uhn.fhir.cli;
  * #L%
  */
 
+import ca.uhn.fhir.jpa.migrate.BaseMigrator;
+import ca.uhn.fhir.jpa.migrate.BruteForceMigrator;
 import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
 import ca.uhn.fhir.jpa.migrate.FlywayMigrator;
+import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
@@ -35,10 +38,14 @@ import java.util.stream.Collectors;
 
 import static org.apache.commons.lang3.StringUtils.defaultString;
 
+/**
+ * NB since 2019-12-05: This class is kind of weirdly named now, since it can either use Flyway or not use Flyway
+ */
 public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends BaseCommand {
 
 	public static final String MIGRATE_DATABASE = "migrate-database";
+	public static final String DONT_USE_FLYWAY = "dont-use-flyway";
 	private Set<String> myFlags;
 	private String myMigrationTableName;
 
@@ -78,6 +85,7 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
 		addRequiredOption(retVal, "p", "password", "Password", "The JDBC database password");
 		addRequiredOption(retVal, "d", "driver", "Driver", "The database driver to use (Options are " + driverOptions() + ")");
 		addOptionalOption(retVal, "x", "flags", "Flags", "A comma-separated list of any specific migration flags (these flags are version specific, see migrator documentation for details)");
+		addOptionalOption(retVal, null, DONT_USE_FLYWAY, false, "If this option is set, the migrator will not use FlywayDB for migration. This setting should only be used if you are trying to migrate a legacy database platform that is not supported by FlywayDB.");
 		addOptionalOption(retVal, null, "no-column-shrink", false, "If this flag is set, the system will not attempt to reduce the length of columns. This is useful in environments with a lot of existing data, where shrinking a column can take a very long time.");
 
 		return retVal;
@@ -110,7 +118,14 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
 			.filter(StringUtils::isNotBlank)
 			.collect(Collectors.toSet());
 
-		FlywayMigrator migrator = new FlywayMigrator(myMigrationTableName);
+		boolean dontUseFlyway = theCommandLine.hasOption("dont-use-flyway");
+
+		BaseMigrator migrator;
+		if (dontUseFlyway) {
+			migrator = new BruteForceMigrator();
+		} else {
+			migrator = new FlywayMigrator(myMigrationTableName);
+		}
 		migrator.setConnectionUrl(url);
 		migrator.setDriverType(driverType);
 		migrator.setUsername(username);
@@ -121,7 +136,7 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
 		migrator.migrate();
 	}
 
-	protected abstract void addTasks(FlywayMigrator theMigrator);
+	protected abstract void addTasks(BaseMigrator theMigrator);
 
 	public void setMigrationTableName(String theMigrationTableName) {
 		myMigrationTableName = theMigrationTableName;
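
For context, a minimal sketch of how the new flag is exercised through the CLI entry point, mirroring the arguments used by the new HapiFlywayMigrateDatabaseCommandTest further down; the JDBC URL and blank credentials here are placeholder values, not part of the commit:

	// Sketch: run the migrate-database command without Flyway (assumed H2 connection details)
	String[] args = new String[]{
		BaseFlywayMigrateDatabaseCommand.MIGRATE_DATABASE,
		"-d", "H2_EMBEDDED",                       // database driver
		"-u", "jdbc:h2:/tmp/example;create=true",  // hypothetical JDBC URL
		"-n", "",                                  // username
		"-p", "",                                  // password
		"--" + BaseFlywayMigrateDatabaseCommand.DONT_USE_FLYWAY
	};
	App.main(args);                                // App is the CLI entry point the test drives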
HapiFlywayMigrateDatabaseCommand.java
@@ -20,6 +20,7 @@ package ca.uhn.fhir.cli;
  * #L%
  */
 
+import ca.uhn.fhir.jpa.migrate.BaseMigrator;
 import ca.uhn.fhir.jpa.migrate.FlywayMigrator;
 import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
 import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
@@ -44,7 +45,7 @@ public class HapiFlywayMigrateDatabaseCommand extends BaseFlywayMigrateDatabaseC
 	}
 
 	@Override
-	protected void addTasks(FlywayMigrator theMigrator) {
+	protected void addTasks(BaseMigrator theMigrator) {
 		List<BaseTask<?>> tasks = new HapiFhirJpaMigrationTasks(getFlags()).getAllTasks(VersionEnum.values());
 		theMigrator.addTasks(tasks);
 	}
HapiFlywayMigrateDatabaseCommandTest.java
@@ -77,6 +77,47 @@ public class HapiFlywayMigrateDatabaseCommandTest {
 		});
 	}
 
+	@Test
+	public void testMigrateFrom340_NoFlyway() throws IOException, SQLException {
+
+		File location = getLocation("migrator_h2_test_340_current");
+
+		String url = "jdbc:h2:" + location.getAbsolutePath() + ";create=true";
+		DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
+
+		String initSql = "/persistence_create_h2_340.sql";
+		executeSqlStatements(connectionProperties, initSql);
+
+		seedDatabase340(connectionProperties);
+
+		ourLog.info("**********************************************");
+		ourLog.info("Done Setup, Starting Migration...");
+		ourLog.info("**********************************************");
+
+		String[] args = new String[]{
+			BaseFlywayMigrateDatabaseCommand.MIGRATE_DATABASE,
+			"-d", "H2_EMBEDDED",
+			"-u", url,
+			"-n", "",
+			"-p", "",
+			"--" + BaseFlywayMigrateDatabaseCommand.DONT_USE_FLYWAY
+		};
+		assertFalse(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RES_REINDEX_JOB"));
+		App.main(args);
+		assertTrue(JdbcUtils.getTableNames(connectionProperties).contains("HFJ_RES_REINDEX_JOB"));
+
+		connectionProperties.getTxTemplate().execute(t -> {
+			JdbcTemplate jdbcTemplate = connectionProperties.newJdbcTemplate();
+			List<Map<String, Object>> values = jdbcTemplate.queryForList("SELECT * FROM hfj_spidx_token");
+			assertEquals(1, values.size());
+			assertEquals("identifier", values.get(0).get("SP_NAME"));
+			assertEquals("12345678", values.get(0).get("SP_VALUE"));
+			assertTrue(values.get(0).keySet().contains("HASH_IDENTITY"));
+			assertEquals(7001889285610424179L, values.get(0).get("HASH_IDENTITY"));
+			return null;
+		});
+	}
+
 	@Test
 	public void testMigrateFromEmptySchema() throws IOException, SQLException {
 
HAPI FHIR Global Atlas data file (new JSON file)
@@ -0,0 +1,280 @@
+{
+  "comment": [
+    "This is the HAPI FHIR Global Atlas data file. Please feel free to file a pull request",
+    "if you would like to add or update your project or organization. Any elements named",
+    "comment (such as this one) will be ignored because lousy JSON has no comments."
+  ],
+  "points": [
+    {
+      "title": "OKDC - FHIR Manager",
+      "description": "Post orders, get results from federal laboratory system",
+      "lat": 47.223040,
+      "lon": 39.721380,
+      "city": "Rostov-On-Don",
+      "added": "2019-08-19"
+    },
+    {
+      "title": "Care Connect Reference Implementation",
+      "description": "UK/England NHS Reference FHIR Implementation",
+      "company": "Mayfield IS",
+      "contactName": "Kevin Mayfield",
+      "contactEmail": "Kevin.mayfield@mayfield-is.co.uk",
+      "link": "https://data.developer.nhs.uk/ccri/exp",
+      "lat": 55.378052,
+      "lon": -3.435973,
+      "added": "2019-08-19"
+    },
+    {
+      "title": "clinFHIR",
+      "contactName": "David Hay",
+      "contactEmail": "davidhay@gmail.com",
+      "link": "http://clinfhir.com",
+      "lat": -42.651737,
+      "lon": 171.926909,
+      "added": "2019-08-18"
+    },
+    {
+      "title": "EDS AP-HP",
+      "description": "Health Datawarehouse",
+      "lat": 48.8647,
+      "lon": 2.349014,
+      "city": "Paris",
+      "added": "2019-09-02"
+    },
+    {
+      "title": "eHealth Innovation",
+      "description": "eHealth Innovation uses HAPI FHIR to power patient apps for managing chronic conditions",
+      "link": "http://ehealthinnovation.org",
+      "lat": 43.741667,
+      "lon": -79.373333,
+      "city": "Toronto",
+      "added": "2019-11-20"
+    },
+    {
+      "title": "CSIRO OntoServer",
+      "description": "Australia's national terminology server. FHIR-native, fast, and comprehensive support for FHIR DSTU2.1, STU3, and R4",
+      "link": "https://ontoserver.csiro.au",
+      "contactName": "Michael Lawley",
+      "contactEmail": "ontoserver-support@csiro.au",
+      "city": "Brisbane",
+      "lat": -27.469770,
+      "lon": 153.025131
+    },
+    {
+      "title": "MOLIT Institut: VITU, HUB, EQU",
+      "description": "Virtual tumorboard based on FHIR, Management Frontend to import Patients from HIS and assign Questionnaire Tasks, Electronic Questionnaire based on FHIR Questionnaire",
+      "link": "https://demo.molit.eu",
+      "contactName": "Patrick Werner",
+      "contactEmail": "pa.f.werner@gmail.com",
+      "city": "Germany",
+      "lat": 50.1188,
+      "lon": 8.6843,
+      "added": "2019-08-18"
+    },
+    {
+      "title": "Altarum Institute",
+      "description": null,
+      "link": "https://demo.molit.eu",
+      "contactName": "Altarum Institute",
+      "contactEmail": "ray.humphrys@altarum.org",
+      "city": "Michigan",
+      "lat": 42.2807,
+      "lon": -83.7801,
+      "added": "2019-08-18"
+    },
+    {
+      "title": "Audacious Inquiry",
+      "description": "HIE Services",
+      "link": null,
+      "contactName": "Keith Boone",
+      "contactEmail": "kboone@ainq.com",
+      "city": "Maryland",
+      "lat": 39.4756,
+      "lon": -76.2420,
+      "added": "2019-08-18"
+    },
+    {
+      "title": "InfoClinic",
+      "description": "InfoClinic HAPI FHIR R4 Testing Server",
+      "link": "http://fhir.infoclinic.co",
+      "contactName": null,
+      "contactEmail": null,
+      "city": "Seoul, South Korea",
+      "lat": 37.5339,
+      "lon": 126.9775,
+      "added": "2019-08-18"
+    },
+    {
+      "title": "PGD FHIR",
+      "description": "Apothesource",
+      "link": null,
+      "contactName": null,
+      "contactEmail": null,
+      "city": "South Carolina, USA",
+      "lat": 34.2296,
+      "lon": -80.7147,
+      "added": "2019-08-19"
+    },
+    {
+      "title": "Drummond Group: Interoperability Hub",
+      "description": "Certified EHR FHIR Surveillance App",
+      "link": "http://www.interophub.com",
+      "contactName": null,
+      "contactEmail": null,
+      "city": "USA",
+      "lat": 35.3961,
+      "lon": -81.6303,
+      "added": "2019-08-19"
+    },
+    {
+      "title": "United States Digital Service/CMS",
+      "description": "Data at the Point of Care: FHIR API for enabling providers to retrieve CMS claims data in a bulk manner.",
+      "link": "http://dpc.cms.gov",
+      "contactName": null,
+      "contactEmail": null,
+      "city": "Washington DC, USA",
+      "lat": 38.8939,
+      "lon": -77.0365,
+      "added": "2019-08-20"
+    },
+    {
+      "title": "AHDIS AG",
+      "description": "Matchbox: Validation and conversion services",
+      "link": "http://dpc.cms.gov",
+      "contactName": "Oliver Egger",
+      "contactEmail": "oliver.egger@ahdis.ch",
+      "city": "Switzerland",
+      "lat": 47.2569,
+      "lon": 8.6989,
+      "added": "2019-08-20"
+    },
+    {
+      "title": "Trifork",
+      "description": "National Telemedicine Project",
+      "link": "https://ehealth-documentation.s3-eu-west-1.amazonaws.com/latest/ig/index.html",
+      "contactName": "Jens Villadsen",
+      "contactEmail": "jvi@trifork.com",
+      "city": "Aarhus, Denmark",
+      "lat": 56.1534,
+      "lon": 10.2255,
+      "added": "2019-08-20"
+    },
+    {
+      "title": "VietTel",
+      "description": "VEHR-HL7",
+      "link": null,
+      "contactName": null,
+      "contactEmail": null,
+      "city": "Vietnam",
+      "lat": 10.8142,
+      "lon": 106.6438,
+      "added": "2019-08-20"
+    },
+    {
+      "title": "ISKRA Solutions Inc.",
+      "description": "Automatic reconciliation of FHIR messages: Highly scalable FHIR based microservice that uses rules engine to automatically match/reconcile FHIR submissions based on externalized configurable set of business defined criteria.",
+      "link": null,
+      "contactName": null,
+      "contactEmail": null,
+      "city": "Toronto, Canada",
+      "lat": 43.741667,
+      "lon": -79.373333,
+      "added": "2019-08-20"
+    },
+    {
+      "title": "Insiel S.p.a.",
+      "description": null,
+      "link": null,
+      "contactName": "Ivano Tomainu",
+      "contactEmail": "ivano.tomainu@insiel.it",
+      "city": "Italy",
+      "lat": 38.1193,
+      "lon": 13.3675,
+      "added": "2019-08-20"
+    },
+    {
+      "title": "Insiel S.p.a.",
+      "description": "PathLab.Tech",
+      "link": null,
+      "contactName": "Prateek Patil",
+      "contactEmail": "prateek@pathlab.tech",
+      "city": "Santa Clara, California",
+      "lat": 37.3526,
+      "lon": -121.9541,
+      "added": "2019-09-13"
+    },
+    {
+      "title": "Telemedis",
+      "description": "tisFHIR: Telemedicine Information System",
+      "link": null,
+      "contactName": "Władysław Węglarz",
+      "contactEmail": "wladyslaw.weglarz@telemedis.pl",
+      "city": "Warsaw, Poland",
+      "lat": 52.2500,
+      "lon": 21.0000,
+      "added": "2019-09-13"
+    },
+    {
+      "title": "CINVESTAV",
+      "description": "Estudio de algoritmos criptograficos ligeros para la seguridad de expedientes clínicos electrónicos",
+      "link": null,
+      "contactName": null,
+      "contactEmail": null,
+      "city": null,
+      "lat": 40.4172,
+      "lon": -3.6840,
+      "added": "2019-09-29"
+    },
+    {
+      "title": "ResMed",
+      "description": "Digital Health Technologies: The largest tele-monitored device fleet in the world. Focus on various respiratory diseases from Sleep Disordered breathing (sleep apnea) to COPD.",
+      "link": null,
+      "contactName": null,
+      "contactEmail": null,
+      "city": "San Diego, California",
+      "lat": 32.8246,
+      "lon": -117.1971,
+      "added": "2019-09-29"
+    },
+    {
+      "title": "Highmark",
+      "description": "FHIR-based Provider Directory Message Model: Using R4 FHIR we created a standard provider directory message model that captures all practitioners, practitioner roles, vendors, vendor roles, locations, healthcare services, affiliated directory networks and tiered insurance plans. We supply this FHIR-based extract to a well-known transparency vendor whom processes our FHIR message model extract and loads the data to be consumed and accessed on a web-based provider search application. Members can then access this provider search app to search for practitioners whom participate in their products and networks.",
+      "link": "https://highmark.sapphirecareselect.com",
+      "contactName": "Wayne Williams",
+      "contactEmail": "Wayne.Williams@hmhs.com",
+      "city": "Pittsburgh, PA",
+      "lat": 40.4562,
+      "lon": -79.8163,
+      "added": "2019-09-29"
+    },
+    {
+      "title": "ePHealth: ACS Health Care",
+      "description": "With the ACS App, Community Health Agents (CHA) work gets faster and faster, and they can take better care of their community. Best of all, it is 100% free to the Health Agent.",
+      "link": "http://ephealth.com.br/?lang=en",
+      "contactName": "Alan Lescano",
+      "contactEmail": "alan@ephealth.com.br",
+      "city": "Sao Paulo, Brazil",
+      "lat": -6.6167,
+      "lon": -38.1500,
+      "added": "2019-09-29"
+    },
+    {
+      "title": "CENS",
+      "description": "Centro Nacional en Sistemas de Información en Salud - CENS operates a national testing infrastructure based on HAPI FHIR.",
+      "link": "http://cens.cl",
+      "city": "Santiago, Chile",
+      "lat": -33.4000,
+      "lon": -70.6000,
+      "added": "2019-11-25"
+    },
+    {
+      "title": "Software Interoperability Lab for Asia",
+      "description": "",
+      "link": "",
+      "city": "Manila, Philippines",
+      "lat": 14.6914,
+      "lon": 120.9686,
+      "added": "2019-11-25"
+    }
+  ]
+}
BaseMigrator.java (new file)
@@ -0,0 +1,72 @@
+package ca.uhn.fhir.jpa.migrate;
+
+import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
+import org.flywaydb.core.api.MigrationInfoService;
+
+import java.util.List;
+import java.util.Optional;
+
+public abstract class BaseMigrator {
+
+	private boolean myDryRun;
+	private boolean myNoColumnShrink;
+	private DriverTypeEnum myDriverType;
+	private String myConnectionUrl;
+	private String myUsername;
+	private String myPassword;
+
+	public abstract void migrate();
+
+	public boolean isDryRun() {
+		return myDryRun;
+	}
+
+	public void setDryRun(boolean theDryRun) {
+		myDryRun = theDryRun;
+	}
+
+	public boolean isNoColumnShrink() {
+		return myNoColumnShrink;
+	}
+
+	public void setNoColumnShrink(boolean theNoColumnShrink) {
+		myNoColumnShrink = theNoColumnShrink;
+	}
+
+	public abstract Optional<MigrationInfoService> getMigrationInfo();
+
+	public abstract void addTasks(List<BaseTask<?>> theMigrationTasks);
+
+	public DriverTypeEnum getDriverType() {
+		return myDriverType;
+	}
+
+	public void setDriverType(DriverTypeEnum theDriverType) {
+		myDriverType = theDriverType;
+	}
+
+	public String getConnectionUrl() {
+		return myConnectionUrl;
+	}
+
+	public void setConnectionUrl(String theConnectionUrl) {
+		myConnectionUrl = theConnectionUrl;
+	}
+
+	public String getUsername() {
+		return myUsername;
+	}
+
+	public void setUsername(String theUsername) {
+		myUsername = theUsername;
+	}
+
+	public String getPassword() {
+		return myPassword;
+	}
+
+	public void setPassword(String thePassword) {
+		myPassword = thePassword;
+	}
+
+}
BruteForceMigrator.java (new file)
@@ -0,0 +1,50 @@
+package ca.uhn.fhir.jpa.migrate;
+
+import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
+import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import org.flywaydb.core.api.MigrationInfoService;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+
+/**
+ * This class is an alternative to {@link FlywayMigrator}. It doesn't use Flyway, but instead just
+ * executes all tasks.
+ */
+public class BruteForceMigrator extends BaseMigrator {
+
+	private static final Logger ourLog = LoggerFactory.getLogger(BruteForceMigrator.class);
+	private List<BaseTask<?>> myTasks = new ArrayList<>();
+
+	@Override
+	public void migrate() {
+		for (BaseTask<?> next : myTasks) {
+			next.setDriverType(getDriverType());
+			next.setDryRun(isDryRun());
+			next.setNoColumnShrink(isNoColumnShrink());
+			DriverTypeEnum.ConnectionProperties connectionProperties = getDriverType().newConnectionProperties(getConnectionUrl(), getUsername(), getPassword());
+			next.setConnectionProperties(connectionProperties);
+
+			try {
+				ourLog.info("Executing task of type: {}", next.getClass().getSimpleName());
+				next.execute();
+			} catch (SQLException e) {
+				throw new InternalErrorException(e);
+			}
+		}
+	}
+
+	@Override
+	public Optional<MigrationInfoService> getMigrationInfo() {
+		return Optional.empty();
+	}
+
+	@Override
+	public void addTasks(List<BaseTask<?>> theMigrationTasks) {
+		myTasks.addAll(theMigrationTasks);
+	}
+}
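
As a rough illustration of how the new class is driven (the connection details here are hypothetical, not taken from the commit):

	// Sketch: execute migration tasks directly, without Flyway bookkeeping
	BruteForceMigrator migrator = new BruteForceMigrator();
	migrator.setDriverType(DriverTypeEnum.H2_EMBEDDED);  // assumed driver
	migrator.setConnectionUrl("jdbc:h2:mem:example");    // hypothetical URL
	migrator.setUsername("");
	migrator.setPassword("");
	migrator.addTasks(tasks);                            // List<BaseTask<?>> built elsewhere
	migrator.migrate();                                  // runs every task in order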
FlywayMigrator.java
@@ -31,34 +31,30 @@ import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Optional;
 
-public class FlywayMigrator {
+public class FlywayMigrator extends BaseMigrator {
 
 	private static final Logger ourLog = LoggerFactory.getLogger(FlywayMigrator.class);
 
-	private DriverTypeEnum myDriverType;
 	private final String myMigrationTableName;
-	private String myConnectionUrl;
-	private String myUsername;
-	private String myPassword;
 	private List<FlywayMigration> myTasks = new ArrayList<>();
-	private boolean myDryRun;
-	private boolean myNoColumnShrink;
 
 	public FlywayMigrator(String theMigrationTableName, BasicDataSource theDataSource) {
 		this(theMigrationTableName);
-		myConnectionUrl = theDataSource.getUrl();
-		myUsername = theDataSource.getUsername();
-		myPassword = theDataSource.getPassword();
+		setConnectionUrl(theDataSource.getUrl());
+		setUsername(theDataSource.getUsername());
+		setPassword(theDataSource.getPassword());
 
 		String driverClassName = theDataSource.getDriverClassName();
 		if (driverClassName == null) {
 			ourLog.error(this.getClass().getSimpleName() + " constructed without a database driver");
 		} else {
-			myDriverType = DriverTypeEnum.fromDriverClassName(driverClassName);
-			if (myDriverType == null) {
+			DriverTypeEnum driverType = DriverTypeEnum.fromDriverClassName(driverClassName);
+			if (driverType == null) {
 				ourLog.error("Unknown driver class " + driverClassName);
 			}
+			setDriverType(driverType);
 		}
 	}
 
@@ -66,32 +62,13 @@ public class FlywayMigrator {
 		myMigrationTableName = theMigrationTableName;
 	}
 
-	public void setDriverType(DriverTypeEnum theDriverType) {
-		myDriverType = theDriverType;
-	}
-
-	public void setConnectionUrl(String theConnectionUrl) {
-		myConnectionUrl = theConnectionUrl;
-	}
-
-	public void setUsername(String theUsername) {
-		myUsername = theUsername;
-	}
-
-	public void setPassword(String thePassword) {
-		myPassword = thePassword;
-	}
-
 	public void addTask(BaseTask<?> theTask) {
 		myTasks.add(new FlywayMigration(theTask, this));
 	}
 
-	public void setDryRun(boolean theDryRun) {
-		myDryRun = theDryRun;
-	}
-
+	@Override
 	public void migrate() {
-		try (DriverTypeEnum.ConnectionProperties connectionProperties = myDriverType.newConnectionProperties(myConnectionUrl, myUsername, myPassword)) {
+		try (DriverTypeEnum.ConnectionProperties connectionProperties = getDriverType().newConnectionProperties(getConnectionUrl(), getUsername(), getPassword())) {
 			Flyway flyway = initFlyway(connectionProperties);
 			flyway.migrate();
 		} catch (Exception e) {
@@ -103,7 +80,7 @@ public class FlywayMigrator {
 		// TODO KHS Is there a way we can use datasource instead of url, username, password here
 		Flyway flyway = Flyway.configure()
 			.table(myMigrationTableName)
-			.dataSource(myConnectionUrl, myUsername, myPassword)
+			.dataSource(getConnectionUrl(), getUsername(), getPassword())
 			.baselineOnMigrate(true)
 			.javaMigrations(myTasks.toArray(new JavaMigration[0]))
 			.load();
@@ -113,45 +90,19 @@ public class FlywayMigrator {
 		return flyway;
 	}
 
+	@Override
 	public void addTasks(List<BaseTask<?>> theTasks) {
 		theTasks.forEach(this::addTask);
 	}
 
-	public void setNoColumnShrink(boolean theNoColumnShrink) {
-		myNoColumnShrink = theNoColumnShrink;
-	}
-
-	public DriverTypeEnum getDriverType() {
-		return myDriverType;
-	}
-
-	public String getConnectionUrl() {
-		return myConnectionUrl;
-	}
-
-	public String getUsername() {
-		return myUsername;
-	}
-
-	public String getPassword() {
-		return myPassword;
-	}
-
-	public boolean isDryRun() {
-		return myDryRun;
-	}
-
-	public boolean isNoColumnShrink() {
-		return myNoColumnShrink;
-	}
-
-	public MigrationInfoService getMigrationInfo() {
-		if (myDriverType == null) {
-			return null;
+	@Override
+	public Optional<MigrationInfoService> getMigrationInfo() {
+		if (getDriverType() == null) {
+			return Optional.empty();
 		}
-		try (DriverTypeEnum.ConnectionProperties connectionProperties = myDriverType.newConnectionProperties(myConnectionUrl, myUsername, myPassword)) {
+		try (DriverTypeEnum.ConnectionProperties connectionProperties = getDriverType().newConnectionProperties(getConnectionUrl(), getUsername(), getPassword())) {
 			Flyway flyway = initFlyway(connectionProperties);
-			return flyway.info();
+			return Optional.of(flyway.info());
 		}
 	}
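
Note that getMigrationInfo() now returns an Optional, so callers test for presence before inspecting pending migrations; a small fragment illustrating the new calling convention (migrator here is any BaseMigrator):

	Optional<MigrationInfoService> info = migrator.getMigrationInfo();
	if (info.isPresent() && info.get().pending().length > 0) {
		// schema is out of date - run the migrator
	}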
SchemaMigrator.java
@@ -32,25 +32,36 @@ import org.slf4j.LoggerFactory;
 import java.sql.Connection;
 import java.sql.SQLException;
 import java.util.List;
+import java.util.Optional;
 import java.util.Properties;
 
 public class SchemaMigrator {
-	private static final Logger ourLog = LoggerFactory.getLogger(SchemaMigrator.class);
 	public static final String HAPI_FHIR_MIGRATION_TABLENAME = "FLY_HFJ_MIGRATION";
+	private static final Logger ourLog = LoggerFactory.getLogger(SchemaMigrator.class);
 	private final BasicDataSource myDataSource;
-	private final FlywayMigrator myMigrator;
 	private final boolean mySkipValidation;
+	private final String myMigrationTableName;
+	private final List<BaseTask<?>> myMigrationTasks;
+	private boolean myDontUseFlyway;
+	private DriverTypeEnum myDriverType;
 
+	/**
+	 * Constructor
+	 */
 	public SchemaMigrator(String theMigrationTableName, BasicDataSource theDataSource, Properties jpaProperties, List<BaseTask<?>> theMigrationTasks) {
 		myDataSource = theDataSource;
+		myMigrationTableName = theMigrationTableName;
+		myMigrationTasks = theMigrationTasks;
+
 		if (jpaProperties.containsKey(AvailableSettings.HBM2DDL_AUTO) && "update".equals(jpaProperties.getProperty(AvailableSettings.HBM2DDL_AUTO))) {
 			mySkipValidation = true;
 		} else {
 			mySkipValidation = false;
 		}
-		myMigrator = new FlywayMigrator(theMigrationTableName, theDataSource);
-		myMigrator.addTasks(theMigrationTasks);
+	}
+
+	public void setDontUseFlyway(boolean theDontUseFlyway) {
+		myDontUseFlyway = theDontUseFlyway;
 	}
 
 	public void validate() {
@@ -59,13 +70,15 @@ public class SchemaMigrator {
 			return;
 		}
 		try (Connection connection = myDataSource.getConnection()) {
-			MigrationInfoService migrationInfo = myMigrator.getMigrationInfo();
-			if (migrationInfo.pending().length > 0) {
-				throw new ConfigurationException("The database schema for " + myDataSource.getUrl() + " is out of date. " +
-					"Current database schema version is " + getCurrentVersion(migrationInfo) + ". Schema version required by application is " +
-					getLastVersion(migrationInfo) + ". Please run the database migrator.");
+			Optional<MigrationInfoService> migrationInfo = newMigrator().getMigrationInfo();
+			if (migrationInfo.isPresent()) {
+				if (migrationInfo.get().pending().length > 0) {
+					throw new ConfigurationException("The database schema for " + myDataSource.getUrl() + " is out of date. " +
+						"Current database schema version is " + getCurrentVersion(migrationInfo.get()) + ". Schema version required by application is " +
+						getLastVersion(migrationInfo.get()) + ". Please run the database migrator.");
+				}
+				ourLog.info("Database schema confirmed at expected version " + getCurrentVersion(migrationInfo.get()));
 			}
-			ourLog.info("Database schema confirmed at expected version " + getCurrentVersion(migrationInfo));
 		} catch (SQLException e) {
 			throw new ConfigurationException("Unable to connect to " + myDataSource.toString(), e);
 		}
@@ -76,7 +89,22 @@ public class SchemaMigrator {
 			ourLog.warn("Database running in hibernate auto-update mode. Skipping schema migration.");
 			return;
 		}
-		myMigrator.migrate();
+		newMigrator().migrate();
+	}
+
+	private BaseMigrator newMigrator() {
+		BaseMigrator migrator;
+		if (myDontUseFlyway) {
+			migrator = new BruteForceMigrator();
+			migrator.setDriverType(myDriverType);
+			migrator.setConnectionUrl(myDataSource.getUrl());
+			migrator.setUsername(myDataSource.getUsername());
+			migrator.setPassword(myDataSource.getPassword());
+		} else {
+			migrator = new FlywayMigrator(myMigrationTableName, myDataSource);
+		}
+		migrator.addTasks(myMigrationTasks);
+		return migrator;
 	}
 
 	private String getCurrentVersion(MigrationInfoService theMigrationInfo) {
@@ -94,4 +122,8 @@ public class SchemaMigrator {
 		}
 		return "unknown";
 	}
+
+	public void setDriverType(DriverTypeEnum theDriverType) {
+		myDriverType = theDriverType;
+	}
 }
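
A short sketch of the new wiring, modelled on the SchemaMigratorTest changes later in this diff; the data source, JPA properties, and task list are assumed to exist already:

	SchemaMigrator schemaMigrator = new SchemaMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, jpaProperties, migrationTasks);
	schemaMigrator.setDriverType(DriverTypeEnum.H2_EMBEDDED); // assumed driver
	schemaMigrator.setDontUseFlyway(true);                    // switch to the BruteForceMigrator path
	schemaMigrator.validate();                                // nothing to check: no Flyway migration table
	schemaMigrator.migrate();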
HapiFhirJpaMigrationTasks.java
@@ -357,23 +357,24 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 
 		version.onTable("HFJ_SEARCH_PARM").dropThisTable("20190814.8");
 
-		version.onTable("HFJ_SPIDX_COORDS").modifyColumn("20190814.9", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
-		version.onTable("HFJ_SPIDX_DATE").modifyColumn("20190814.10", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
-		version.onTable("HFJ_SPIDX_STRING").modifyColumn("20190814.11", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
+		// Make some columns non-nullable that were previously nullable - These are marked as failure allowed, since
+		// SQL Server won't let us change nullability on columns with indexes pointing to them
+		version.onTable("HFJ_SPIDX_COORDS").modifyColumn("20190814.9", "RES_TYPE").nonNullable().failureAllowed().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
+		version.onTable("HFJ_SPIDX_DATE").modifyColumn("20190814.10", "RES_TYPE").nonNullable().failureAllowed().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
+		version.onTable("HFJ_SPIDX_STRING").modifyColumn("20190814.11", "RES_TYPE").nonNullable().failureAllowed().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
 		version.onTable("HFJ_SPIDX_STRING").addColumn("20190814.12", "HASH_IDENTITY").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
 		version.onTable("HFJ_SPIDX_STRING").addIndex("20190814.13", "IDX_SP_STRING_HASH_IDENT").unique(false).withColumns("HASH_IDENTITY");
-		version.onTable("HFJ_SPIDX_COORDS").modifyColumn("20190814.14", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
-		version.onTable("HFJ_SPIDX_QUANTITY").modifyColumn("20190814.15", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
+		version.onTable("HFJ_SPIDX_COORDS").modifyColumn("20190814.14", "RES_TYPE").nonNullable().failureAllowed().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
+		version.onTable("HFJ_SPIDX_QUANTITY").modifyColumn("20190814.15", "RES_TYPE").nonNullable().failureAllowed().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
 		version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("20190814.16", "HASH_UNITS_AND_VALPREFIX");
 		version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("20190814.17", "HASH_VALPREFIX");
-		version.onTable("HFJ_SPIDX_NUMBER").modifyColumn("20190814.18", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
-		version.onTable("HFJ_SPIDX_TOKEN").modifyColumn("20190814.19", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
-		version.onTable("HFJ_SPIDX_URI").modifyColumn("20190814.20", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
-		version.onTable("HFJ_SPIDX_URI").modifyColumn("20190814.21", "SP_URI").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 254);
-		version.onTable("TRM_CODESYSTEM").modifyColumn("20190814.22", "CODE_SYSTEM_URI").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
-		version.onTable("TRM_CODESYSTEM").modifyColumn("20190814.23", "CS_NAME").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
-		version.onTable("TRM_CODESYSTEM_VER").modifyColumn("20190814.24", "CS_VERSION_ID").nullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
+		version.onTable("HFJ_SPIDX_NUMBER").modifyColumn("20190814.18", "RES_TYPE").nonNullable().failureAllowed().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
+		version.onTable("HFJ_SPIDX_TOKEN").modifyColumn("20190814.19", "RES_TYPE").nonNullable().failureAllowed().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
+		version.onTable("HFJ_SPIDX_URI").modifyColumn("20190814.20", "RES_TYPE").nonNullable().failureAllowed().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 100);
+		version.onTable("HFJ_SPIDX_URI").modifyColumn("20190814.21", "SP_URI").nullable().failureAllowed().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 254);
+		version.onTable("TRM_CODESYSTEM").modifyColumn("20190814.22", "CODE_SYSTEM_URI").nonNullable().failureAllowed().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
+		version.onTable("TRM_CODESYSTEM").modifyColumn("20190814.23", "CS_NAME").nullable().failureAllowed().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
+		version.onTable("TRM_CODESYSTEM_VER").modifyColumn("20190814.24", "CS_VERSION_ID").nullable().failureAllowed().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
 	}
SchemaMigratorTest.java
@@ -3,24 +3,32 @@ package ca.uhn.fhir.jpa.migrate;
 import ca.uhn.fhir.context.ConfigurationException;
 import ca.uhn.fhir.jpa.migrate.taskdef.AddTableRawSqlTask;
 import ca.uhn.fhir.jpa.migrate.taskdef.BaseTest;
+import com.google.common.collect.ImmutableList;
+import org.hamcrest.Matchers;
+import org.junit.Assert;
 import org.junit.Test;
+import org.mockito.ArgumentMatchers;
 
+import java.sql.SQLException;
 import java.util.Collections;
 import java.util.Properties;
+import java.util.Set;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
 import static org.junit.Assert.fail;
+import static org.mockito.ArgumentMatchers.contains;
 
 public class SchemaMigratorTest extends BaseTest {
 
 	@Test
-	public void migrationRequired() {
+	public void testMigrationRequired() {
 		AddTableRawSqlTask task = new AddTableRawSqlTask("1", "1");
 		task.setTableName("SOMETABLE");
 		task.addSql(DriverTypeEnum.H2_EMBEDDED, "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
-		getMigrator().addTask(task);
 
-		SchemaMigrator schemaMigrator = new SchemaMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), Collections.singletonList(task));
+		SchemaMigrator schemaMigrator = new SchemaMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), ImmutableList.of(task));
+		schemaMigrator.setDriverType(DriverTypeEnum.H2_EMBEDDED);
 
 		try {
 			schemaMigrator.validate();
@@ -28,7 +36,34 @@ public class SchemaMigratorTest extends BaseTest {
 		} catch (ConfigurationException e) {
 			assertEquals("The database schema for " + getUrl() + " is out of date. Current database schema version is unknown. Schema version required by application is " + task.getFlywayVersion() + ". Please run the database migrator.", e.getMessage());
 		}
-		getMigrator().migrate();
+
+		schemaMigrator.migrate();
+
 		schemaMigrator.validate();
 	}
 
+	@Test
+	public void testMigrationRequiredNoFlyway() throws SQLException {
+		AddTableRawSqlTask task = new AddTableRawSqlTask("1", "1");
+		task.setTableName("SOMETABLE");
+		task.addSql(DriverTypeEnum.H2_EMBEDDED, "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
+
+		SchemaMigrator schemaMigrator = new SchemaMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), ImmutableList.of(task));
+		schemaMigrator.setDriverType(DriverTypeEnum.H2_EMBEDDED);
+		schemaMigrator.setDontUseFlyway(true);
+
+		// Validate shouldn't fail if we aren't using Flyway
+		schemaMigrator.validate();
+
+		schemaMigrator.migrate();
+
+		schemaMigrator.validate();
+
+		DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(getDataSource().getUrl(), getDataSource().getUsername(), getDataSource().getPassword());
+		Set<String> tableNames = JdbcUtils.getTableNames(connectionProperties);
+		assertThat(tableNames, Matchers.contains("SOMETABLE"));
+
+	}
+
 }
DropTableTest.java
@@ -67,9 +67,9 @@ public class DropTableTest extends BaseTest {
 
 		assertThat(JdbcUtils.getTableNames(getConnectionProperties()), (hasItems("SOMETABLE")));
 
-		assertThat(getMigrator().getMigrationInfo().pending().length, greaterThan(0));
+		assertThat(getMigrator().getMigrationInfo().get().pending().length, greaterThan(0));
 		getMigrator().migrate();
-		assertThat(getMigrator().getMigrationInfo().pending().length, equalTo(0));
+		assertThat(getMigrator().getMigrationInfo().get().pending().length, equalTo(0));
 
 		assertThat(JdbcUtils.getTableNames(getConnectionProperties()), not(hasItems("SOMETABLE")));
 	}