Work on migration

James Agnew 2018-09-08 07:14:43 +08:00
parent f5d567cd00
commit 4852b0b60f
21 changed files with 835 additions and 204 deletions

View File

@@ -88,6 +88,27 @@
<artifactId>commons-cli</artifactId>
</dependency>
<dependency>
<groupId>org.apache.derby</groupId>
<artifactId>derby</artifactId>
</dependency>
<dependency>
<groupId>org.mariadb.jdbc</groupId>
<artifactId>mariadb-java-client</artifactId>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
</dependency>
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>mssql-jdbc</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>

View File

@@ -157,7 +157,7 @@ public abstract class BaseApp {
commands.add(new IgPackUploader());
commands.add(new ExportConceptMapToCsvCommand());
commands.add(new ImportCsvToConceptMapCommand());
commands.add(new BaseMigrateDatabaseCommand());
commands.add(new HapiMigrateDatabaseCommand());
return commands;
}

View File

@@ -1,9 +1,12 @@
package ca.uhn.fhir.cli;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.Migrator;
import ca.uhn.fhir.util.VersionEnum;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.jetbrains.annotations.NotNull;
import java.util.Arrays;
import java.util.Collections;
@@ -11,7 +14,7 @@ import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
public class BaseMigrateDatabaseCommand extends BaseCommand {
public abstract class BaseMigrateDatabaseCommand extends BaseCommand {
@Override
public String getCommandDescription() {
return "This command migrates a HAPI FHIR JPA database from one version of HAPI FHIR to a newer version";
@@ -26,15 +29,22 @@ public class BaseMigrateDatabaseCommand extends BaseCommand {
public Options getOptions() {
Options retVal = new Options();
addOptionalOption(retVal, "r", "dry-run", false, "Log the SQL statements that would be executed but to not actually make any changes");
addRequiredOption(retVal,"u", "url", "URL", "The JDBC database URL");
addRequiredOption(retVal,"n", "username", "Username", "The JDBC database username");
addRequiredOption(retVal,"p", "password", "Password", "The JDBC database password");
addRequiredOption(retVal,"f", "from", "Version", "The database schema version to migrate FROM");
addRequiredOption(retVal,"t", "to", "Version", "The database schema version to migrate TO");
addRequiredOption(retVal,"d", "driver", "Driver", "The database driver to use (Options are " + driverOptions() + ")");
return retVal;
}
private String driverOptions() {
return Arrays.stream(DriverTypeEnum.values()).map(Enum::name).collect(Collectors.joining(", "));
}
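// Illustration (not part of this commit): given the driver types visible elsewhere in
// this change set, driverOptions() returns a comma-separated list along the lines of
// "DERBY_EMBEDDED, MARIADB_10_1, MYSQL_5_7, POSTGRES_9_4, ORACLE_12C, MSSQL_2012"
// (the exact ordering follows the DriverTypeEnum declaration order).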
@Override
public List<String> provideUsageNotes() {
String versions = "The following versions are supported: " +
@@ -45,5 +55,44 @@ public class BaseMigrateDatabaseCommand extends BaseCommand {
@Override
public void run(CommandLine theCommandLine) throws ParseException, ExecutionException {
String url = theCommandLine.getOptionValue("u");
String username = theCommandLine.getOptionValue("n");
String password = theCommandLine.getOptionValue("p");
DriverTypeEnum driverType;
String driverTypeString = theCommandLine.getOptionValue("d");
try {
driverType = DriverTypeEnum.valueOf(driverTypeString);
} catch (Exception e) {
throw new ParseException("Invalid driver type \"" + driverTypeString + "\". Valid values are: " + driverOptions());
}
VersionEnum from = parseVersion(theCommandLine, "f", "from");
VersionEnum to = parseVersion(theCommandLine, "t", "to");
boolean dryRun = theCommandLine.hasOption("r");
Migrator migrator = new Migrator();
migrator.setConnectionUrl(url);
migrator.setDriverType(driverType);
migrator.setUsername(username);
migrator.setPassword(password);
migrator.setDryRun(dryRun);
addTasks(migrator, from, to);
migrator.migrate();
}
@NotNull
private VersionEnum parseVersion(CommandLine theCommandLine, String theOptionName, String theOptionDesc) throws ParseException {
VersionEnum from;
String optionValue = theCommandLine.getOptionValue(theOptionName);
try {
from = VersionEnum.valueOf(optionValue);
} catch (Exception e) {
throw new ParseException("Invalid " + theOptionDesc+ " value: " + optionValue);
}
return from;
}
protected abstract void addTasks(Migrator theMigrator, VersionEnum theFrom, VersionEnum theTo);
}
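For reference, the options defined above map onto a command line like the following (a sketch that mirrors the integration test later in this commit; the Derby URL is only an example):

./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u "jdbc:derby:directory:target/migrator_derby_test_340_350;create=true" -n "" -p "" -r -f V3_4_0 -t V3_5_0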

View File

@@ -0,0 +1,16 @@
package ca.uhn.fhir.cli;
import ca.uhn.fhir.jpa.migrate.Migrator;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;
import java.util.List;
public class HapiMigrateDatabaseCommand extends BaseMigrateDatabaseCommand {
@Override
protected void addTasks(Migrator theMigrator, VersionEnum theFrom, VersionEnum theTo) {
List<BaseTask<?>> tasks = new HapiFhirJpaMigrationTasks().getTasks(theFrom, theTo);
tasks.forEach(theMigrator::addTask);
}
}

View File

@@ -38,6 +38,12 @@
<appender-ref ref="STDOUT" />
</logger>
<!--
Always log the migrator
-->
<logger name="ca.uhn.fhir.jpa.migrate" additivity="false" level="info">
<appender-ref ref="STDOUT" />
</logger>
<root level="warn">
<appender-ref ref="STDOUT" />

View File

@@ -0,0 +1,93 @@
package ca.uhn.fhir.cli;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import com.google.common.base.Charsets;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.apache.commons.lang3.StringUtils.isBlank;
public class HapiMigrateDatabaseCommandTest {
private static final Logger ourLog = LoggerFactory.getLogger(HapiMigrateDatabaseCommandTest.class);
static {
System.setProperty("test", "true");
}
@Test
public void testMigrate() throws IOException {
File directory = new File("target/migrator_derby_test_340_350");
if (directory.exists()) {
FileUtils.deleteDirectory(directory);
}
String url = "jdbc:derby:directory:target/migrator_derby_test_340_350;create=true";
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(url, "", "");
String script = IOUtils.toString(HapiMigrateDatabaseCommandTest.class.getResourceAsStream("/persistence_create_derby107_340.sql"), Charsets.UTF_8);
List<String> scriptStatements = new ArrayList<>(Arrays.asList(script.split("\n")));
for (int i = 0; i < scriptStatements.size(); i++) {
String nextStatement = scriptStatements.get(i);
if (isBlank(nextStatement)) {
scriptStatements.remove(i);
i--;
continue;
}
nextStatement = nextStatement.trim();
while (nextStatement.endsWith(";")) {
nextStatement = nextStatement.substring(0, nextStatement.length() - 1);
}
scriptStatements.set(i, nextStatement);
}
connectionProperties.getTxTemplate().execute(t -> {
for (String next : scriptStatements) {
connectionProperties.newJdbcTemplate().execute(next);
}
return null;
});
ourLog.info("**********************************************");
ourLog.info("Done Setup, Starting Dry Run...");
ourLog.info("**********************************************");
String[] args = new String[]{
"migrate-database",
"-d", "DERBY_EMBEDDED",
"-u", url,
"-n", "",
"-p", "",
"-r",
"-f", "V3_4_0",
"-t", "V3_5_0"
};
App.main(args);
ourLog.info("**********************************************");
ourLog.info("Done Setup, Starting Migration...");
ourLog.info("**********************************************");
args = new String[]{
"migrate-database",
"-d", "DERBY_EMBEDDED",
"-u", url,
"-n", "",
"-p", "",
"-f", "V3_4_0",
"-t", "V3_5_0"
};
App.main(args);
}
}

View File

@@ -0,0 +1,156 @@
create sequence SEQ_CNCPT_MAP_GRP_ELM_TGT_PID start with 1 increment by 50;
create sequence SEQ_CODESYSTEM_PID start with 1 increment by 50;
create sequence SEQ_CODESYSTEMVER_PID start with 1 increment by 50;
create sequence SEQ_CONCEPT_DESIG_PID start with 1 increment by 50;
create sequence SEQ_CONCEPT_MAP_GROUP_PID start with 1 increment by 50;
create sequence SEQ_CONCEPT_MAP_GRP_ELM_PID start with 1 increment by 50;
create sequence SEQ_CONCEPT_MAP_PID start with 1 increment by 50;
create sequence SEQ_CONCEPT_PC_PID start with 1 increment by 50;
create sequence SEQ_CONCEPT_PID start with 1 increment by 50;
create sequence SEQ_CONCEPT_PROP_PID start with 1 increment by 50;
create sequence SEQ_FORCEDID_ID start with 1 increment by 50;
create sequence SEQ_HISTORYTAG_ID start with 1 increment by 50;
create sequence SEQ_IDXCMPSTRUNIQ_ID start with 1 increment by 50;
create sequence SEQ_RESLINK_ID start with 1 increment by 50;
create sequence SEQ_RESOURCE_HISTORY_ID start with 1 increment by 50;
create sequence SEQ_RESOURCE_ID start with 1 increment by 50;
create sequence SEQ_RESPARMPRESENT_ID start with 1 increment by 50;
create sequence SEQ_RESTAG_ID start with 1 increment by 50;
create sequence SEQ_SEARCH start with 1 increment by 50;
create sequence SEQ_SEARCH_INC start with 1 increment by 50;
create sequence SEQ_SEARCH_RES start with 1 increment by 50;
create sequence SEQ_SEARCHPARM_ID start with 1 increment by 50;
create sequence SEQ_SPIDX_COORDS start with 1 increment by 50;
create sequence SEQ_SPIDX_DATE start with 1 increment by 50;
create sequence SEQ_SPIDX_NUMBER start with 1 increment by 50;
create sequence SEQ_SPIDX_QUANTITY start with 1 increment by 50;
create sequence SEQ_SPIDX_STRING start with 1 increment by 50;
create sequence SEQ_SPIDX_TOKEN start with 1 increment by 50;
create sequence SEQ_SPIDX_URI start with 1 increment by 50;
create sequence SEQ_SUBSCRIPTION_ID start with 1 increment by 50;
create sequence SEQ_TAGDEF_ID start with 1 increment by 50;
create table HFJ_FORCED_ID (PID bigint not null, FORCED_ID varchar(100) not null, RESOURCE_PID bigint not null, RESOURCE_TYPE varchar(100) default '', primary key (PID));
create table HFJ_HISTORY_TAG (PID bigint not null, TAG_ID bigint, RES_ID bigint not null, RES_TYPE varchar(30) not null, RES_VER_PID bigint not null, primary key (PID));
create table HFJ_IDX_CMP_STRING_UNIQ (PID bigint not null, IDX_STRING varchar(150) not null, RES_ID bigint, primary key (PID));
create table HFJ_RES_LINK (PID bigint not null, SRC_PATH varchar(100) not null, SRC_RESOURCE_ID bigint not null, SOURCE_RESOURCE_TYPE varchar(30) default '' not null, TARGET_RESOURCE_ID bigint, TARGET_RESOURCE_TYPE varchar(30) default '' not null, TARGET_RESOURCE_URL varchar(200), SP_UPDATED timestamp, primary key (PID));
create table HFJ_RES_PARAM_PRESENT (PID bigint not null, SP_PRESENT boolean not null, RES_ID bigint not null, SP_ID bigint not null, primary key (PID));
create table HFJ_RES_TAG (PID bigint not null, TAG_ID bigint, RES_ID bigint, RES_TYPE varchar(30) not null, primary key (PID));
create table HFJ_RES_VER (PID bigint not null, RES_DELETED_AT timestamp, RES_VERSION varchar(7), HAS_TAGS boolean not null, RES_PUBLISHED timestamp not null, RES_UPDATED timestamp not null, RES_ENCODING varchar(5) not null, RES_TEXT blob, RES_ID bigint, RES_TYPE varchar(30) not null, RES_VER bigint not null, FORCED_ID_PID bigint, primary key (PID));
create table HFJ_RESOURCE (RES_ID bigint not null, RES_DELETED_AT timestamp, RES_VERSION varchar(7), HAS_TAGS boolean not null, RES_PUBLISHED timestamp not null, RES_UPDATED timestamp not null, SP_HAS_LINKS boolean, HASH_SHA256 varchar(64), SP_INDEX_STATUS bigint, RES_LANGUAGE varchar(20), SP_CMPSTR_UNIQ_PRESENT boolean, SP_COORDS_PRESENT boolean, SP_DATE_PRESENT boolean, SP_NUMBER_PRESENT boolean, SP_QUANTITY_PRESENT boolean, SP_STRING_PRESENT boolean, SP_TOKEN_PRESENT boolean, SP_URI_PRESENT boolean, RES_PROFILE varchar(200), RES_TYPE varchar(30), RES_VER bigint, FORCED_ID_PID bigint, primary key (RES_ID));
create table HFJ_SEARCH (PID bigint not null, CREATED timestamp not null, FAILURE_CODE integer, FAILURE_MESSAGE varchar(500), LAST_UPDATED_HIGH timestamp, LAST_UPDATED_LOW timestamp, NUM_FOUND integer not null, PREFERRED_PAGE_SIZE integer, RESOURCE_ID bigint, RESOURCE_TYPE varchar(200), SEARCH_LAST_RETURNED timestamp, SEARCH_QUERY_STRING clob(10000), SEARCH_QUERY_STRING_HASH integer, SEARCH_TYPE integer not null, SEARCH_STATUS varchar(10) not null, TOTAL_COUNT integer, SEARCH_UUID varchar(40) not null, primary key (PID));
create table HFJ_SEARCH_INCLUDE (PID bigint not null, SEARCH_INCLUDE varchar(200) not null, INC_RECURSE boolean not null, REVINCLUDE boolean not null, SEARCH_PID bigint not null, primary key (PID));
create table HFJ_SEARCH_PARM (PID bigint not null, PARAM_NAME varchar(100) not null, RES_TYPE varchar(30) not null, primary key (PID));
create table HFJ_SEARCH_RESULT (PID bigint not null, SEARCH_ORDER integer not null, RESOURCE_PID bigint not null, SEARCH_PID bigint not null, primary key (PID));
create table HFJ_SPIDX_COORDS (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, SP_LATITUDE double, SP_LONGITUDE double, primary key (SP_ID));
create table HFJ_SPIDX_DATE (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, SP_VALUE_HIGH timestamp, SP_VALUE_LOW timestamp, primary key (SP_ID));
create table HFJ_SPIDX_NUMBER (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, SP_VALUE numeric(19,2), primary key (SP_ID));
create table HFJ_SPIDX_QUANTITY (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, HASH_UNITS_AND_VALPREFIX bigint, HASH_VALPREFIX bigint, SP_SYSTEM varchar(200), SP_UNITS varchar(200), SP_VALUE numeric(19,2), primary key (SP_ID));
create table HFJ_SPIDX_STRING (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, HASH_EXACT bigint, HASH_NORM_PREFIX bigint, SP_VALUE_EXACT varchar(200), SP_VALUE_NORMALIZED varchar(200), primary key (SP_ID));
create table HFJ_SPIDX_TOKEN (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, HASH_SYS bigint, HASH_SYS_AND_VALUE bigint, HASH_VALUE bigint, SP_SYSTEM varchar(200), SP_VALUE varchar(200), primary key (SP_ID));
create table HFJ_SPIDX_URI (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, HASH_URI bigint, SP_URI varchar(255), primary key (SP_ID));
create table HFJ_SUBSCRIPTION_STATS (PID bigint not null, CREATED_TIME timestamp not null, RES_ID bigint, primary key (PID));
create table HFJ_TAG_DEF (TAG_ID bigint not null, TAG_CODE varchar(200), TAG_DISPLAY varchar(200), TAG_SYSTEM varchar(200), TAG_TYPE integer not null, primary key (TAG_ID));
create table TRM_CODESYSTEM (PID bigint not null, CODE_SYSTEM_URI varchar(255) not null, CS_NAME varchar(255), RES_ID bigint, CURRENT_VERSION_PID bigint, primary key (PID));
create table TRM_CODESYSTEM_VER (PID bigint not null, CS_VERSION_ID varchar(255), CODESYSTEM_PID bigint, RES_ID bigint not null, primary key (PID));
create table TRM_CONCEPT (PID bigint not null, CODE varchar(100) not null, CODESYSTEM_PID bigint, DISPLAY varchar(400), INDEX_STATUS bigint, CODE_SEQUENCE integer, primary key (PID));
create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CONCEPT_PID bigint, primary key (PID));
create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID));
create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID));
create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(100) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID));
create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(50) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID));
create table TRM_CONCEPT_PC_LINK (PID bigint not null, CHILD_PID bigint, PARENT_PID bigint, REL_TYPE integer, CODESYSTEM_PID bigint not null, primary key (PID));
create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CONCEPT_PID bigint, primary key (PID));
create index IDX_FORCEDID_TYPE_FORCEDID on HFJ_FORCED_ID (RESOURCE_TYPE, FORCED_ID);
create unique index IDX_FORCEDID_RESID on HFJ_FORCED_ID (RESOURCE_PID);
create unique index IDX_FORCEDID_TYPE_RESID on HFJ_FORCED_ID (RESOURCE_TYPE, RESOURCE_PID);
create unique index IDX_RESHISTTAG_TAGID on HFJ_HISTORY_TAG (RES_VER_PID, TAG_ID);
create index IDX_IDXCMPSTRUNIQ_RESOURCE on HFJ_IDX_CMP_STRING_UNIQ (RES_ID);
create unique index IDX_IDXCMPSTRUNIQ_STRING on HFJ_IDX_CMP_STRING_UNIQ (IDX_STRING);
create index IDX_RL_TPATHRES on HFJ_RES_LINK (SRC_PATH, TARGET_RESOURCE_ID);
create index IDX_RL_SRC on HFJ_RES_LINK (SRC_RESOURCE_ID);
create index IDX_RL_DEST on HFJ_RES_LINK (TARGET_RESOURCE_ID);
create index IDX_RESPARMPRESENT_RESID on HFJ_RES_PARAM_PRESENT (RES_ID);
create unique index IDX_RESPARMPRESENT_SPID_RESID on HFJ_RES_PARAM_PRESENT (SP_ID, RES_ID);
create unique index IDX_RESTAG_TAGID on HFJ_RES_TAG (RES_ID, TAG_ID);
create index IDX_RESVER_TYPE_DATE on HFJ_RES_VER (RES_TYPE, RES_UPDATED);
create index IDX_RESVER_ID_DATE on HFJ_RES_VER (RES_ID, RES_UPDATED);
create index IDX_RESVER_DATE on HFJ_RES_VER (RES_UPDATED);
create unique index IDX_RESVER_ID_VER on HFJ_RES_VER (RES_ID, RES_VER);
create index IDX_RES_DATE on HFJ_RESOURCE (RES_UPDATED);
create index IDX_RES_LANG on HFJ_RESOURCE (RES_TYPE, RES_LANGUAGE);
create index IDX_RES_PROFILE on HFJ_RESOURCE (RES_PROFILE);
create index IDX_RES_TYPE on HFJ_RESOURCE (RES_TYPE);
create index IDX_INDEXSTATUS on HFJ_RESOURCE (SP_INDEX_STATUS);
create index IDX_SEARCH_LASTRETURNED on HFJ_SEARCH (SEARCH_LAST_RETURNED);
create index IDX_SEARCH_RESTYPE_HASHS on HFJ_SEARCH (RESOURCE_TYPE, SEARCH_QUERY_STRING_HASH, CREATED);
create unique index IDX_SEARCH_UUID on HFJ_SEARCH (SEARCH_UUID);
create unique index IDX_SEARCHPARM_RESTYPE_SPNAME on HFJ_SEARCH_PARM (RES_TYPE, PARAM_NAME);
create unique index IDX_SEARCHRES_ORDER on HFJ_SEARCH_RESULT (SEARCH_PID, SEARCH_ORDER);
create index IDX_SP_COORDS on HFJ_SPIDX_COORDS (RES_TYPE, SP_NAME, SP_LATITUDE, SP_LONGITUDE);
create index IDX_SP_COORDS_UPDATED on HFJ_SPIDX_COORDS (SP_UPDATED);
create index IDX_SP_COORDS_RESID on HFJ_SPIDX_COORDS (RES_ID);
create index IDX_SP_DATE on HFJ_SPIDX_DATE (RES_TYPE, SP_NAME, SP_VALUE_LOW, SP_VALUE_HIGH);
create index IDX_SP_DATE_UPDATED on HFJ_SPIDX_DATE (SP_UPDATED);
create index IDX_SP_DATE_RESID on HFJ_SPIDX_DATE (RES_ID);
create index IDX_SP_NUMBER on HFJ_SPIDX_NUMBER (RES_TYPE, SP_NAME, SP_VALUE);
create index IDX_SP_NUMBER_UPDATED on HFJ_SPIDX_NUMBER (SP_UPDATED);
create index IDX_SP_NUMBER_RESID on HFJ_SPIDX_NUMBER (RES_ID);
create index IDX_SP_QUANTITY on HFJ_SPIDX_QUANTITY (RES_TYPE, SP_NAME, SP_SYSTEM, SP_UNITS, SP_VALUE);
create index IDX_SP_QUANTITY_UPDATED on HFJ_SPIDX_QUANTITY (SP_UPDATED);
create index IDX_SP_QUANTITY_RESID on HFJ_SPIDX_QUANTITY (RES_ID);
create index IDX_SP_STRING on HFJ_SPIDX_STRING (RES_TYPE, SP_NAME, SP_VALUE_NORMALIZED);
create index IDX_SP_STRING_UPDATED on HFJ_SPIDX_STRING (SP_UPDATED);
create index IDX_SP_STRING_RESID on HFJ_SPIDX_STRING (RES_ID);
create index IDX_SP_TOKEN on HFJ_SPIDX_TOKEN (RES_TYPE, SP_NAME, SP_SYSTEM, SP_VALUE);
create index IDX_SP_TOKEN_UNQUAL on HFJ_SPIDX_TOKEN (RES_TYPE, SP_NAME, SP_VALUE);
create index IDX_SP_TOKEN_UPDATED on HFJ_SPIDX_TOKEN (SP_UPDATED);
create index IDX_SP_TOKEN_RESID on HFJ_SPIDX_TOKEN (RES_ID);
create index IDX_SP_URI on HFJ_SPIDX_URI (RES_TYPE, SP_NAME, SP_URI);
create index IDX_SP_URI_RESTYPE_NAME on HFJ_SPIDX_URI (RES_TYPE, SP_NAME);
create index IDX_SP_URI_UPDATED on HFJ_SPIDX_URI (SP_UPDATED);
create index IDX_SP_URI_COORDS on HFJ_SPIDX_URI (RES_ID);
create unique index IDX_SUBSC_RESID on HFJ_SUBSCRIPTION_STATS (RES_ID);
create unique index IDX_TAGDEF_TYPESYSCODE on HFJ_TAG_DEF (TAG_TYPE, TAG_SYSTEM, TAG_CODE);
create unique index IDX_CS_CODESYSTEM on TRM_CODESYSTEM (CODE_SYSTEM_URI);
create index IDX_CONCEPT_INDEXSTATUS on TRM_CONCEPT (INDEX_STATUS);
create unique index IDX_CONCEPT_CS_CODE on TRM_CONCEPT (CODESYSTEM_PID, CODE);
create unique index IDX_CONCEPT_MAP_URL on TRM_CONCEPT_MAP (URL);
create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE);
create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE);
alter table HFJ_FORCED_ID add constraint FK_FORCEDID_RESOURCE foreign key (RESOURCE_PID) references HFJ_RESOURCE;
alter table HFJ_HISTORY_TAG add constraint FKtderym7awj6q8iq5c51xv4ndw foreign key (TAG_ID) references HFJ_TAG_DEF;
alter table HFJ_HISTORY_TAG add constraint FK_HISTORYTAG_HISTORY foreign key (RES_VER_PID) references HFJ_RES_VER;
alter table HFJ_IDX_CMP_STRING_UNIQ add constraint FK_IDXCMPSTRUNIQ_RES_ID foreign key (RES_ID) references HFJ_RESOURCE;
alter table HFJ_RES_LINK add constraint FK_RESLINK_SOURCE foreign key (SRC_RESOURCE_ID) references HFJ_RESOURCE;
alter table HFJ_RES_LINK add constraint FK_RESLINK_TARGET foreign key (TARGET_RESOURCE_ID) references HFJ_RESOURCE;
alter table HFJ_RES_PARAM_PRESENT add constraint FK_RESPARMPRES_RESID foreign key (RES_ID) references HFJ_RESOURCE;
alter table HFJ_RES_PARAM_PRESENT add constraint FK_RESPARMPRES_SPID foreign key (SP_ID) references HFJ_SEARCH_PARM;
alter table HFJ_RES_TAG add constraint FKbfcjbaftmiwr3rxkwsy23vneo foreign key (TAG_ID) references HFJ_TAG_DEF;
alter table HFJ_RES_TAG add constraint FK_RESTAG_RESOURCE foreign key (RES_ID) references HFJ_RESOURCE;
alter table HFJ_RES_VER add constraint FKh20i7lcbchkaxekvwg9ix4hc5 foreign key (FORCED_ID_PID) references HFJ_FORCED_ID;
alter table HFJ_RESOURCE add constraint FKhjgj8cp879gfxko25cx5o692r foreign key (FORCED_ID_PID) references HFJ_FORCED_ID;
alter table HFJ_SEARCH_INCLUDE add constraint FK_SEARCHINC_SEARCH foreign key (SEARCH_PID) references HFJ_SEARCH;
alter table HFJ_SEARCH_RESULT add constraint FK_SEARCHRES_RES foreign key (RESOURCE_PID) references HFJ_RESOURCE;
alter table HFJ_SEARCH_RESULT add constraint FK_SEARCHRES_SEARCH foreign key (SEARCH_PID) references HFJ_SEARCH;
alter table HFJ_SPIDX_COORDS add constraint FKc97mpk37okwu8qvtceg2nh9vn foreign key (RES_ID) references HFJ_RESOURCE;
alter table HFJ_SPIDX_DATE add constraint FK17s70oa59rm9n61k9thjqrsqm foreign key (RES_ID) references HFJ_RESOURCE;
alter table HFJ_SPIDX_NUMBER add constraint FKcltihnc5tgprj9bhpt7xi5otb foreign key (RES_ID) references HFJ_RESOURCE;
alter table HFJ_SPIDX_QUANTITY add constraint FKn603wjjoi1a6asewxbbd78bi5 foreign key (RES_ID) references HFJ_RESOURCE;
alter table HFJ_SPIDX_STRING add constraint FK_SPIDXSTR_RESOURCE foreign key (RES_ID) references HFJ_RESOURCE;
alter table HFJ_SPIDX_TOKEN add constraint FK7ulx3j1gg3v7maqrejgc7ybc4 foreign key (RES_ID) references HFJ_RESOURCE;
alter table HFJ_SPIDX_URI add constraint FKgxsreutymmfjuwdswv3y887do foreign key (RES_ID) references HFJ_RESOURCE;
alter table HFJ_SUBSCRIPTION_STATS add constraint FK_SUBSC_RESOURCE_ID foreign key (RES_ID) references HFJ_RESOURCE;
alter table TRM_CODESYSTEM add constraint FK_TRMCODESYSTEM_CURVER foreign key (CURRENT_VERSION_PID) references TRM_CODESYSTEM_VER;
alter table TRM_CODESYSTEM add constraint FK_TRMCODESYSTEM_RES foreign key (RES_ID) references HFJ_RESOURCE;
alter table TRM_CODESYSTEM_VER add constraint FK_CODESYSVER_CS_ID foreign key (CODESYSTEM_PID) references TRM_CODESYSTEM;
alter table TRM_CODESYSTEM_VER add constraint FK_CODESYSVER_RES_ID foreign key (RES_ID) references HFJ_RESOURCE;
alter table TRM_CONCEPT add constraint FK_CONCEPT_PID_CS_PID foreign key (CODESYSTEM_PID) references TRM_CODESYSTEM_VER;
alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT;
alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE;
alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP;
alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP;
alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT;
alter table TRM_CONCEPT_PC_LINK add constraint FK_TERM_CONCEPTPC_CHILD foreign key (CHILD_PID) references TRM_CONCEPT;
alter table TRM_CONCEPT_PC_LINK add constraint FK_TERM_CONCEPTPC_CS foreign key (CODESYSTEM_PID) references TRM_CODESYSTEM_VER;
alter table TRM_CONCEPT_PC_LINK add constraint FK_TERM_CONCEPTPC_PARENT foreign key (PARENT_PID) references TRM_CONCEPT;
alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT;

View File

@@ -47,13 +47,14 @@ public enum DriverTypeEnum {
}
public ConnectionProperties newJdbcTemplate(String theUrl, String theUsername, String thePassword) {
public ConnectionProperties newConnectionProperties(String theUrl, String theUsername, String thePassword) {
SingleConnectionDataSource dataSource = new SingleConnectionDataSource();
dataSource.setAutoCommit(false);
dataSource.setDriverClassName(myDriverClassName);
dataSource.setUrl(theUrl);
dataSource.setUsername(theUsername);
dataSource.setPassword(thePassword);
dataSource.setSuppressClose(true);
DataSourceTransactionManager transactionManager = new DataSourceTransactionManager();
transactionManager.setDataSource(dataSource);

View File

@@ -19,6 +19,7 @@ public class Migrator {
private List<BaseTask> myTasks = new ArrayList<>();
private DriverTypeEnum.ConnectionProperties myConnectionProperties;
private int myChangesCount;
private boolean myDryRun;
public int getChangesCount() {
return myChangesCount;
@@ -44,14 +45,19 @@ public class Migrator {
myTasks.add(theTask);
}
public void setDryRun(boolean theDryRun) {
myDryRun = theDryRun;
}
public void migrate() {
ourLog.info("Starting migration with {} tasks", myTasks.size());
myConnectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newJdbcTemplate(myConnectionUrl, myUsername, myPassword);
myConnectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(myConnectionUrl, myUsername, myPassword);
try {
for (BaseTask next : myTasks) {
next.setDriverType(myDriverType);
next.setConnectionProperties(myConnectionProperties);
next.setDryRun(myDryRun);
try {
next.execute();
} catch (SQLException e) {

View File

@@ -67,6 +67,11 @@ public class ArbitrarySqlTask extends BaseTask<ArbitrarySqlTask> {
@Override
public void execute() {
if (isDryRun()) {
logDryRunSql(mySql);
return;
}
List<Map<String, Object>> rows;
do {
ourLog.info("Querying for up to {} rows", myBatchSize);

View File

@@ -32,6 +32,13 @@ public abstract class BaseTableColumnTypeTask<T extends BaseTableTask> extends B
setColumnType(ColumnTypeEnum.STRING, DriverTypeEnum.MSSQL_2012, "varchar(?)");
setColumnType(ColumnTypeEnum.STRING, DriverTypeEnum.ORACLE_12C, "varchar2(?)");
setColumnType(ColumnTypeEnum.STRING, DriverTypeEnum.POSTGRES_9_4, "varchar(?)");
setColumnType(ColumnTypeEnum.DATE_TIMESTAMPT, DriverTypeEnum.DERBY_EMBEDDED, "timestamp");
setColumnType(ColumnTypeEnum.DATE_TIMESTAMPT, DriverTypeEnum.MARIADB_10_1, "datetime(6)");
setColumnType(ColumnTypeEnum.DATE_TIMESTAMPT, DriverTypeEnum.MYSQL_5_7, "datetime(6)");
setColumnType(ColumnTypeEnum.DATE_TIMESTAMPT, DriverTypeEnum.MSSQL_2012, "datetime2");
setColumnType(ColumnTypeEnum.DATE_TIMESTAMPT, DriverTypeEnum.ORACLE_12C, "timestamp");
setColumnType(ColumnTypeEnum.DATE_TIMESTAMPT, DriverTypeEnum.POSTGRES_9_4, "timestamp");
}
public ColumnTypeEnum getColumnType() {
@@ -113,6 +120,13 @@ public abstract class BaseTableColumnTypeTask<T extends BaseTableTask> extends B
Assert.isTrue(theColumnLength != null, "Must supply a column length");
return "varchar(" + theColumnLength + ")";
}
},
DATE_TIMESTAMPT {
@Override
public String getDescriptor(Long theColumnLength) {
Assert.isTrue(theColumnLength == null, "Must not supply a column length");
return "timestamp";
}
};
public abstract String getDescriptor(Long theColumnLength);
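As a quick illustration of the descriptor contract above (a sketch, not part of the commit): STRING requires a column length, while DATE_TIMESTAMPT must not be given one:

ColumnTypeEnum.STRING.getDescriptor(100L);          // returns "varchar(100)"
ColumnTypeEnum.DATE_TIMESTAMPT.getDescriptor(null); // returns "timestamp"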

View File

@@ -16,11 +16,21 @@ public abstract class BaseTask<T extends BaseTask> {
private DriverTypeEnum myDriverType;
private String myDescription;
private int myChangesCount;
private boolean myDryRun;
public boolean isDryRun() {
return myDryRun;
}
public void setDryRun(boolean theDryRun) {
myDryRun = theDryRun;
}
public String getDescription() {
return myDescription;
}
@SuppressWarnings("unchecked")
public T setDescription(String theDescription) {
myDescription = theDescription;
return (T) this;
@@ -31,6 +41,11 @@ public abstract class BaseTask<T extends BaseTask> {
}
public void executeSql(@Language("SQL") String theSql, Object... theArguments) {
if (isDryRun()) {
logDryRunSql(theSql);
return;
}
Integer changes = getConnectionProperties().getTxTemplate().execute(t -> {
JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
int changesCount = jdbcTemplate.update(theSql, theArguments);
@@ -42,6 +57,10 @@ public abstract class BaseTask<T extends BaseTask> {
}
protected void logDryRunSql(@Language("SQL") String theSql) {
ourLog.info("WOULD EXECUTE SQL: {}", theSql);
}
public DriverTypeEnum.ConnectionProperties getConnectionProperties() {
return myConnectionProperties;
}
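To illustrate the dry-run path above (a sketch assuming a concrete BaseTask subclass named task, with hash and pid bound elsewhere): when dry run is enabled, executeSql() only logs the statement and returns without touching the database:

task.setDryRun(true);
task.executeSql("update HFJ_RES_PARAM_PRESENT set HASH_PRESENCE = ? where PID = ?", hash, pid);
// logs: WOULD EXECUTE SQL: update HFJ_RES_PARAM_PRESENT set HASH_PRESENCE = ? where PID = ?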

View File

@@ -27,6 +27,10 @@ public class CalculateHashesTask extends BaseTableColumnTask<CalculateHashesTask
@Override
public void execute() {
if (isDryRun()) {
return;
}
List<Map<String, Object>> rows;
do {
rows = getTxTemplate().execute(t -> {

View File

@@ -0,0 +1,27 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class LogStartSectionWithMessageTask extends BaseTask {
private static final Logger ourLog = LoggerFactory.getLogger(LogStartSectionWithMessageTask.class);
private final String myMessage;
public LogStartSectionWithMessageTask(String theMessage) {
myMessage = theMessage;
}
@Override
public void validate() {
// nothing
}
@Override
public void execute() {
ourLog.info("");
ourLog.info(StringUtils.leftPad("", myMessage.length(), "*"));
ourLog.info(myMessage);
ourLog.info(StringUtils.leftPad("", myMessage.length(), "*"));
}
}
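For example (output sketch using a message that appears later in this commit), executing new LogStartSectionWithMessageTask("Starting work on table: HFJ_FORCED_ID") would log a banner like:

*************************************
Starting work on table: HFJ_FORCED_ID
*************************************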

View File

@@ -3,24 +3,29 @@ package ca.uhn.fhir.jpa.migrate.tasks;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.*;
import ca.uhn.fhir.jpa.migrate.taskdef.AddColumnTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ArbitrarySqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTableColumnTypeTask;
import ca.uhn.fhir.jpa.migrate.taskdef.CalculateHashesTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;
import com.google.common.collect.Multimap;
import com.google.common.collect.MultimapBuilder;
import org.intellij.lang.annotations.Language;
@SuppressWarnings("UnstableApiUsage")
public class HapiFhirJpaMigrationTasks {
private Multimap<VersionEnum, BaseTask<?>> myTasks = MultimapBuilder.hashKeys().arrayListValues().build();
@SuppressWarnings({"UnstableApiUsage", "SqlNoDataSourceInspection", "SpellCheckingInspection"})
public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
/**
* Constructor
*/
public HapiFhirJpaMigrationTasks() {
init350();
}
private void init350() {
Builder version = forVersion(VersionEnum.V3_5_0);
// Forced ID changes
Builder.BuilderWithTableName forcedId = forVersion(VersionEnum.V3_5_0).onTable("HFJ_FORCED_ID");
Builder.BuilderWithTableName forcedId = version.onTable("HFJ_FORCED_ID");
version.startSectionWithMessage("Starting work on table: " + forcedId.getTableName());
forcedId
.dropIndex("IDX_FORCEDID_TYPE_FORCEDID");
forcedId
@@ -31,9 +36,10 @@ public class HapiFhirJpaMigrationTasks {
.withColumns("RESOURCE_TYPE", "FORCED_ID");
// Indexes - Coords
Builder.BuilderWithTableName spidxCoords = forVersion(VersionEnum.V3_5_0).onTable("HFJ_SPIDX_COORDS");
Builder.BuilderWithTableName spidxCoords = version.onTable("HFJ_SPIDX_COORDS");
version.startSectionWithMessage("Starting work on table: " + spidxCoords.getTableName());
spidxCoords
.dropIndex("IDX_SP_COORDS_HASH");
.dropIndex("IDX_SP_COORDS");
spidxCoords
.addColumn("HASH_IDENTITY")
.nullable()
@@ -41,7 +47,7 @@ public class HapiFhirJpaMigrationTasks {
spidxCoords
.addIndex("IDX_SP_COORDS_HASH")
.unique(false)
.withColumns("HASH_IDENTITY", "SP_VALUE", "SP_LATITUDE", "SP_LONGITUDE");
.withColumns("HASH_IDENTITY", "SP_LATITUDE", "SP_LONGITUDE");
spidxCoords
.addTask(new CalculateHashesTask()
.setColumnName("HASH_IDENTITY")
@@ -49,7 +55,8 @@ public class HapiFhirJpaMigrationTasks {
);
// Indexes - Date
Builder.BuilderWithTableName spidxDate = forVersion(VersionEnum.V3_5_0).onTable("HFJ_SPIDX_DATE");
Builder.BuilderWithTableName spidxDate = version.onTable("HFJ_SPIDX_DATE");
version.startSectionWithMessage("Starting work on table: " + spidxDate.getTableName());
spidxDate
.dropIndex("IDX_SP_TOKEN");
spidxDate
@@ -67,7 +74,8 @@ public class HapiFhirJpaMigrationTasks {
);
// Indexes - Number
Builder.BuilderWithTableName spidxNumber = forVersion(VersionEnum.V3_5_0).onTable("HFJ_SPIDX_NUMBER");
Builder.BuilderWithTableName spidxNumber = version.onTable("HFJ_SPIDX_NUMBER");
version.startSectionWithMessage("Starting work on table: " + spidxNumber.getTableName());
spidxNumber
.dropIndex("IDX_SP_NUMBER");
spidxNumber
@@ -85,7 +93,8 @@ public class HapiFhirJpaMigrationTasks {
);
// Indexes - Quantity
Builder.BuilderWithTableName spidxQuantity = forVersion(VersionEnum.V3_5_0).onTable("HFJ_SPIDX_QUANTITY");
Builder.BuilderWithTableName spidxQuantity = version.onTable("HFJ_SPIDX_QUANTITY");
version.startSectionWithMessage("Starting work on table: " + spidxQuantity.getTableName());
spidxQuantity
.dropIndex("IDX_SP_QUANTITY");
spidxQuantity
@@ -121,17 +130,14 @@ public class HapiFhirJpaMigrationTasks {
);
// Indexes - String
Builder.BuilderWithTableName spidxString = forVersion(VersionEnum.V3_5_0).onTable("HFJ_SPIDX_STRING");
Builder.BuilderWithTableName spidxString = version.onTable("HFJ_SPIDX_STRING");
version.startSectionWithMessage("Starting work on table: " + spidxString.getTableName());
spidxString
.dropIndex("IDX_SP_STRING");
spidxString
.addColumn("HASH_NORM_PREFIX")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
spidxString
.addColumn("HASH_NORM")
.nullable()
.type(AddColumnTask.ColumnTypeEnum.LONG);
spidxString
.addIndex("IDX_SP_STRING_HASH_NRM")
.unique(false)
@@ -142,13 +148,14 @@ public class HapiFhirJpaMigrationTasks {
.withColumns("HASH_EXACT");
spidxString
.addTask(new CalculateHashesTask()
.setColumnName("HASH_IDENTITY")
.addCalculator("IDX_SP_STRING_HASH_NRM", t -> ResourceIndexedSearchParamString.calculateHashNormalized(new DaoConfig(), t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_VALUE_NORMALIZED")))
.addCalculator("IDX_SP_STRING_HASH_EXCT", t -> ResourceIndexedSearchParamString.calculateHashExact(t.getResourceType(), t.getParamName(), t.getString("SP_VALUE_EXACT")))
.setColumnName("HASH_NORM_PREFIX")
.addCalculator("HASH_NORM_PREFIX", t -> ResourceIndexedSearchParamString.calculateHashNormalized(new DaoConfig(), t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_VALUE_NORMALIZED")))
.addCalculator("HASH_EXACT", t -> ResourceIndexedSearchParamString.calculateHashExact(t.getResourceType(), t.getParamName(), t.getString("SP_VALUE_EXACT")))
);
// Indexes - Token
Builder.BuilderWithTableName spidxToken = forVersion(VersionEnum.V3_5_0).onTable("HFJ_SPIDX_TOKEN");
Builder.BuilderWithTableName spidxToken = version.onTable("HFJ_SPIDX_TOKEN");
version.startSectionWithMessage("Starting work on table: " + spidxToken.getTableName());
spidxToken
.dropIndex("IDX_SP_TOKEN");
spidxToken
@@ -195,7 +202,8 @@ public class HapiFhirJpaMigrationTasks {
);
// Indexes - URI
Builder.BuilderWithTableName spidxUri = forVersion(VersionEnum.V3_5_0).onTable("HFJ_SPIDX_URI");
Builder.BuilderWithTableName spidxUri = version.onTable("HFJ_SPIDX_URI");
version.startSectionWithMessage("Starting work on table: " + spidxUri.getTableName());
spidxUri
.addColumn("HASH_IDENTITY")
.nullable()
@@ -216,7 +224,8 @@ public class HapiFhirJpaMigrationTasks {
);
// Search Parameter Presence
Builder.BuilderWithTableName spp = forVersion(VersionEnum.V3_5_0).onTable("HFJ_RES_PARAM_PRESENT");
Builder.BuilderWithTableName spp = version.onTable("HFJ_RES_PARAM_PRESENT");
version.startSectionWithMessage("Starting work on table: " + spp.getTableName());
spp.dropIndex("IDX_RESPARMPRESENT_SPID_RESID");
spp
.addColumn("HASH_PRESENCE")
@@ -230,22 +239,27 @@ public class HapiFhirJpaMigrationTasks {
consolidateSearchParamPresenceIndexesTask.setBatchSize(1);
String sql = "SELECT " +
"HFJ_SEARCH_PARM.RES_TYPE RES_TYPE, HFJ_SEARCH_PARM.PARAM_NAME PARAM_NAME, " +
"HFJ_RES_PARAM_PRESENT.PID PID, HFJ_RES_PARAM_PRESENT.SP_ID SP_ID, HFJ_RES_PARAM_PRESENT.SP_PRESENT SP_PRESENT, HFJ_RES_PARAM_PRESENT.HASH_PRESENT HASH_PRESENT " +
"HFJ_RES_PARAM_PRESENT.PID PID, HFJ_RES_PARAM_PRESENT.SP_ID SP_ID, HFJ_RES_PARAM_PRESENT.SP_PRESENT SP_PRESENT, HFJ_RES_PARAM_PRESENT.HASH_PRESENCE HASH_PRESENCE " +
"from HFJ_RES_PARAM_PRESENT " +
"join HFJ_SEARCH_PARM ON (HFJ_SEARCH_PARM.PID = HFJ_RES_PARAM_PRESENT.SP_ID) " +
"where HFJ_RES_PARAM_PRESENT.HASH_PRESENT is null";
"where HFJ_RES_PARAM_PRESENT.HASH_PRESENCE is null";
consolidateSearchParamPresenceIndexesTask.addQuery(sql, ArbitrarySqlTask.QueryModeEnum.BATCH_UNTIL_NO_MORE, t -> {
Long pid = (Long) t.get("PID");
Boolean present = (Boolean) t.get("SP_PRESENT");
Boolean present = (Boolean) t.get("HASH_PRESENCE");
String resType = (String) t.get("RES_TYPE");
String paramName = (String) t.get("PARAM_NAME");
Long hash = SearchParamPresent.calculateHashPresence(resType, paramName, present);
consolidateSearchParamPresenceIndexesTask.executeSql("update HFJ_RES_PARAM_PRESENT set HASH_PRESENCE = ? where PID = ?", hash, pid);
});
forVersion(VersionEnum.V3_5_0).addTask(consolidateSearchParamPresenceIndexesTask);
version.addTask(consolidateSearchParamPresenceIndexesTask);
// Concept
Builder.BuilderWithTableName trmConcept = forVersion(VersionEnum.V3_5_0).onTable("TRM_CONCEPT");
Builder.BuilderWithTableName trmConcept = version.onTable("TRM_CONCEPT");
version.startSectionWithMessage("Starting work on table: " + trmConcept.getTableName());
trmConcept
.addColumn("CONCEPT_UPDATED")
.nullable()
.type(BaseTableColumnTypeTask.ColumnTypeEnum.DATE_TIMESTAMPT);
trmConcept
.addIndex("IDX_CONCEPT_UPDATED")
.unique(false)
@@ -256,7 +270,8 @@ public class HapiFhirJpaMigrationTasks {
.withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 500);
// Concept Designation
forVersion(VersionEnum.V3_5_0)
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_DESIG");
version
.addTable("TRM_CONCEPT_DESIG")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
@@ -278,8 +293,9 @@ public class HapiFhirJpaMigrationTasks {
.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT");
// Concept Property
forVersion(VersionEnum.V3_5_0)
.addTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_PROPERTY");
version
.addTable("TRM_CONCEPT_PROPERTY")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
@@ -300,7 +316,8 @@ public class HapiFhirJpaMigrationTasks {
.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT");
// Concept Map - Map
forVersion(VersionEnum.V3_5_0)
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP");
version
.addTable("TRM_CONCEPT_MAP")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE")
@@ -321,7 +338,8 @@ public class HapiFhirJpaMigrationTasks {
.addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)");
// Concept Map - Group
forVersion(VersionEnum.V3_5_0)
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP_GROUP");
version
.addTable("TRM_CONCEPT_MAP_GROUP")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP")
@@ -338,7 +356,8 @@ public class HapiFhirJpaMigrationTasks {
.addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP");
// Concept Map - Group Element
forVersion(VersionEnum.V3_5_0)
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP_GRP_ELEMENT");
version
.addTable("TRM_CONCEPT_MAP_GRP_ELEMENT")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP")
@@ -360,7 +379,8 @@ public class HapiFhirJpaMigrationTasks {
.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP");
// Concept Map - Group Element Target
forVersion(VersionEnum.V3_5_0)
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP_GRP_ELM_TGT");
version
.addTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT")
@@ -380,156 +400,7 @@ public class HapiFhirJpaMigrationTasks {
.addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.MSSQL_2012, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)")
.addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT");
}
private Builder forVersion(VersionEnum theVersion) {
return new Builder(theVersion);
}
private class Builder {
private final VersionEnum myVersion;
private String myTableName;
public Builder(VersionEnum theVersion) {
myVersion = theVersion;
}
public BuilderWithTableName onTable(String theTableName) {
myTableName = theTableName;
return new BuilderWithTableName();
}
private void addTask(BaseTask theTask) {
theTask.validate();
myTasks.put(myVersion, theTask);
}
public BuilderAddTable addTable(String theTableName) {
myTableName = theTableName;
return new BuilderAddTable();
}
private class BuilderWithTableName {
private String myIndexName;
private String myColumnName;
void dropIndex(String theIndexName) {
DropIndexTask task = new DropIndexTask();
task.setIndexName(theIndexName);
task.setTableName(myTableName);
addTask(task);
}
public BuilderAddIndexWithName addIndex(String theIndexName) {
myIndexName = theIndexName;
return new BuilderAddIndexWithName();
}
public BuilderAddColumnWithName addColumn(String theColumnName) {
myColumnName = theColumnName;
return new BuilderAddColumnWithName();
}
public void addTask(BaseTableTask<?> theTask) {
theTask.setTableName(myTableName);
Builder.this.addTask(theTask);
}
public BuilderModifyColumnWithName modifyColumn(String theColumnName) {
myColumnName = theColumnName;
return new BuilderModifyColumnWithName();
}
private class BuilderAddIndexWithName {
private boolean myUnique;
public BuilderAddIndexUnique unique(boolean theUnique) {
myUnique = theUnique;
return new BuilderAddIndexUnique();
}
private class BuilderAddIndexUnique {
public void withColumns(String... theColumnNames) {
AddIndexTask task = new AddIndexTask();
task.setTableName(myTableName);
task.setIndexName(myIndexName);
task.setUnique(myUnique);
task.setColumns(theColumnNames);
addTask(task);
}
}
}
private class BuilderAddColumnWithName {
private boolean myNullable;
public BuilderAddColumnWithNameNullable nullable() {
myNullable = true;
return new BuilderAddColumnWithNameNullable();
}
private class BuilderAddColumnWithNameNullable {
public void type(AddColumnTask.ColumnTypeEnum theColumnType) {
AddColumnTask task = new AddColumnTask();
task.setColumnName(myColumnName);
task.setNullable(myNullable);
task.setColumnType(theColumnType);
addTask(task);
}
}
}
private class BuilderModifyColumnWithName {
private boolean myNullable;
public BuilderModifyColumnWithNameAndNullable nullable() {
myNullable = true;
return new BuilderModifyColumnWithNameAndNullable();
}
public BuilderModifyColumnWithNameAndNullable nonNullable() {
myNullable = false;
return new BuilderModifyColumnWithNameAndNullable();
}
private class BuilderModifyColumnWithNameAndNullable {
public void withType(BaseTableColumnTypeTask.ColumnTypeEnum theColumnType, int theLength) {
if (theColumnType == BaseTableColumnTypeTask.ColumnTypeEnum.STRING) {
ModifyColumnTask task = new ModifyColumnTask();
task.setColumnName(myColumnName);
task.setTableName(myTableName);
task.setColumnLength(theLength);
task.setNullable(myNullable);
addTask(task);
} else {
throw new IllegalArgumentException("Can not specify length for column of type " + theColumnType);
}
}
}
}
}
private class BuilderAddTable {
private final AddTableTask myTask;
private BuilderAddTable() {
myTask = new AddTableTask();
myTask.setTableName(myTableName);
}
public BuilderAddTable addSql(DriverTypeEnum theDriverTypeEnum, @Language("SQL") String theSql) {
myTask.addSql(theDriverTypeEnum, theSql);
return this;
}
}
}
}

View File

@@ -0,0 +1,203 @@
package ca.uhn.fhir.jpa.migrate.tasks.api;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.*;
import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;
import com.google.common.collect.Multimap;
import com.google.common.collect.MultimapBuilder;
import org.apache.commons.lang3.Validate;
import org.intellij.lang.annotations.Language;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
public class BaseMigrationTasks {
private Multimap<VersionEnum, BaseTask<?>> myTasks = MultimapBuilder.hashKeys().arrayListValues().build();
public List<BaseTask<?>> getTasks(@Nonnull VersionEnum theFrom, @Nonnull VersionEnum theTo) {
Validate.notNull(theFrom);
Validate.notNull(theTo);
Validate.isTrue(theFrom.ordinal() < theTo.ordinal(), "From version must be lower than to version");
List<BaseTask<?>> retVal = new ArrayList<>();
for (VersionEnum nextVersion : VersionEnum.values()) {
if (nextVersion.ordinal() <= theFrom.ordinal()) {
continue;
}
if (nextVersion.ordinal() > theTo.ordinal()) {
continue;
}
Collection<BaseTask<?>> nextValues = myTasks.get(nextVersion);
if (nextValues != null) {
retVal.addAll(nextValues);
}
}
return retVal;
}
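// Note (illustration, not part of this commit): the two ordinal checks above make the
// range exclusive of theFrom and inclusive of theTo, so migrating from V3_4_0 to V3_5_0
// returns only the tasks registered under V3_5_0, e.g.:
//   List<BaseTask<?>> tasks = new HapiFhirJpaMigrationTasks().getTasks(VersionEnum.V3_4_0, VersionEnum.V3_5_0);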
protected HapiFhirJpaMigrationTasks.Builder forVersion(VersionEnum theVersion) {
return new HapiFhirJpaMigrationTasks.Builder(theVersion);
}
protected class Builder {
private final VersionEnum myVersion;
private String myTableName;
Builder(VersionEnum theVersion) {
myVersion = theVersion;
}
public BuilderWithTableName onTable(String theTableName) {
myTableName = theTableName;
return new BuilderWithTableName();
}
public void addTask(BaseTask theTask) {
theTask.validate();
myTasks.put(myVersion, theTask);
}
public BuilderAddTable addTable(String theTableName) {
myTableName = theTableName;
return new BuilderAddTable();
}
public void startSectionWithMessage(String theMessage) {
Validate.notBlank(theMessage);
addTask(new LogStartSectionWithMessageTask(theMessage));
}
public class BuilderWithTableName {
private String myIndexName;
private String myColumnName;
public String getTableName() {
return myTableName;
}
public void dropIndex(String theIndexName) {
DropIndexTask task = new DropIndexTask();
task.setIndexName(theIndexName);
task.setTableName(myTableName);
addTask(task);
}
public BuilderAddIndexWithName addIndex(String theIndexName) {
myIndexName = theIndexName;
return new BuilderAddIndexWithName();
}
public BuilderAddColumnWithName addColumn(String theColumnName) {
myColumnName = theColumnName;
return new BuilderAddColumnWithName();
}
public void addTask(BaseTableTask<?> theTask) {
theTask.setTableName(myTableName);
Builder.this.addTask(theTask);
}
public BuilderModifyColumnWithName modifyColumn(String theColumnName) {
myColumnName = theColumnName;
return new BuilderModifyColumnWithName();
}
public class BuilderAddIndexWithName {
private boolean myUnique;
public BuilderAddIndexUnique unique(boolean theUnique) {
myUnique = theUnique;
return new BuilderAddIndexUnique();
}
public class BuilderAddIndexUnique {
public void withColumns(String... theColumnNames) {
AddIndexTask task = new AddIndexTask();
task.setTableName(myTableName);
task.setIndexName(myIndexName);
task.setUnique(myUnique);
task.setColumns(theColumnNames);
addTask(task);
}
}
}
public class BuilderAddColumnWithName {
private boolean myNullable;
public BuilderAddColumnWithNameNullable nullable() {
myNullable = true;
return new BuilderAddColumnWithNameNullable();
}
public class BuilderAddColumnWithNameNullable {
public void type(AddColumnTask.ColumnTypeEnum theColumnType) {
AddColumnTask task = new AddColumnTask();
task.setColumnName(myColumnName);
task.setNullable(myNullable);
task.setColumnType(theColumnType);
addTask(task);
}
}
}
public class BuilderModifyColumnWithName {
private boolean myNullable;
public BuilderModifyColumnWithNameAndNullable nullable() {
myNullable = true;
return new BuilderModifyColumnWithNameAndNullable();
}
public BuilderModifyColumnWithNameAndNullable nonNullable() {
myNullable = false;
return new BuilderModifyColumnWithNameAndNullable();
}
public class BuilderModifyColumnWithNameAndNullable {
public void withType(BaseTableColumnTypeTask.ColumnTypeEnum theColumnType, int theLength) {
if (theColumnType == BaseTableColumnTypeTask.ColumnTypeEnum.STRING) {
ModifyColumnTask task = new ModifyColumnTask();
task.setColumnName(myColumnName);
task.setTableName(myTableName);
task.setColumnLength(theLength);
task.setNullable(myNullable);
task.setColumnType(theColumnType);
addTask(task);
} else {
throw new IllegalArgumentException("Can not specify length for column of type " + theColumnType);
}
}
}
}
}
public class BuilderAddTable {
private final AddTableTask myTask;
protected BuilderAddTable() {
myTask = new AddTableTask();
myTask.setTableName(myTableName);
addTask(myTask);
}
public BuilderAddTable addSql(DriverTypeEnum theDriverTypeEnum, @Language("SQL") String theSql) {
myTask.addSql(theDriverTypeEnum, theSql);
return this;
}
}
}
}
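Taken together, the fluent builder above composes migration tasks like this (a sketch condensed from HapiFhirJpaMigrationTasks earlier in this commit):

Builder version = forVersion(VersionEnum.V3_5_0);
Builder.BuilderWithTableName spidxCoords = version.onTable("HFJ_SPIDX_COORDS");
version.startSectionWithMessage("Starting work on table: " + spidxCoords.getTableName());
spidxCoords.addColumn("HASH_IDENTITY").nullable().type(AddColumnTask.ColumnTypeEnum.LONG);
spidxCoords.addIndex("IDX_SP_COORDS_HASH").unique(false).withColumns("HASH_IDENTITY", "SP_LATITUDE", "SP_LONGITUDE");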

View File

@@ -52,7 +52,7 @@ public class BaseTest {
public void before() {
myUrl = "jdbc:derby:memory:database " + (ourDatabaseUrl++) + ";create=true";
myConnectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newJdbcTemplate(myUrl, "SA", "SA");
myConnectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(myUrl, "SA", "SA");
myMigrator = new Migrator();
myMigrator.setConnectionUrl(myUrl);

pom.xml
View File

@@ -736,6 +736,11 @@
<artifactId>gson</artifactId>
<version>${gson_version}</version>
</dependency>
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>mssql-jdbc</artifactId>
<version>6.2.2.jre8</version>
</dependency>
<dependency>
<groupId>com.sun.xml.bind</groupId>
<artifactId>jaxb-core</artifactId>
@@ -781,6 +786,11 @@
<artifactId>reflow-velocity-tools</artifactId>
<version>1.1.1</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>6.0.4</version>
</dependency>
<dependency>
<groupId>net.riotopsys</groupId>
<artifactId>json_patch</artifactId>
@@ -1122,11 +1132,21 @@
<artifactId>javassist</artifactId>
<version>3.22.0-GA</version>
</dependency>
<dependency>
<groupId>org.mariadb.jdbc</groupId>
<artifactId>mariadb-java-client</artifactId>
<version>1.5.2</version>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>2.18.3</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>42.2.2.jre7</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-android</artifactId>

View File

@@ -12,6 +12,20 @@
as well as for use. JDK 8 remains supported and is the minimum requirement
in order to build or use HAPI FHIR.
</action>
<action type="add">
A new command has been added to the HAPI FHIR CLI tool: "migrate-database". This
command performs the schema modifications required when upgrading HAPI FHIR JPA
to a new version (previously this was a manual process involving running scripts and
reindexing everything).
<![CDATA[
<br/><br/>
See the
<a href="http://hapifhir.io/doc_cli.html#migrate-database">command documentation</a>
for more information on how to use this tool. Please post in the HAPI FHIR
Google Group if you run into issues, as this is a brand new framework and we still need
lots of help with testing.
]]>
</action>
<action type="add">
The version of a few dependencies have been bumped to the
latest versions (dependent HAPI modules listed in brackets):
@@ -53,8 +67,8 @@
(from version 1.11.6). Thanks to Roman Doboni for the pull request!
</action>
<action type="fix">
A crash in the validator was fixed: Validating a Bundle that did not have fullUrl entries
in a required spot caused a NullPointerException.
A crash in the validator was fixed: Validating a Bundle that did not have Bundle.fullUrl
populated could cause a NullPointerException.
</action>
<action type="add">
AuthorizationInterceptor now examines requests more closely in order
@@ -80,6 +94,17 @@
JPA server loading logic has been improved to enhance performance when
loading a large number of results in a page, or when loading multiple
search results with tags. Thanks to Frank Tao for the pull request!
This change was introduced as part of a collaboration between
HAPI FHIR and the US National Institutes of Health (NIH).
</action>
<action type="fix" issue="1010">
Resource loading logic for the JPA server has been optimized to
reduce the number of database round trips required when loading
search results where many of the entries have a "forced ID" (an alphanumeric
client-assigned resource ID). Thanks to Frank Tao for the pull
request!
This change was introduced as part of a collaboration between
HAPI FHIR and the US National Institutes of Health (NIH).
</action>
<action type="add" issue="1000">
LOINC uploader has been updated to support the new LOINC filename
@@ -92,13 +117,6 @@
<![CDATA[<code>resolve()</code>]]> function in its path to descend into
contained resources and index fields within them.
</action>
<action type="fix" issue="1010">
Resource loading logic for the JPA server has been optimized to
reduce the number of database round trips required when loading
search results where many of the entries have a "forced ID" (an alphanumeric
client-assigned resource ID). Thanks to Frank Tao for the pull
request!
</action>
<action type="add">
A new IValidationSupport implementation has been added, named CachingValidationSupport. This
module wraps another implementation and provides short-term caching. This can have a dramatic

View File

@@ -136,7 +136,48 @@ Java HotSpot(TM) 64-Bit Server VM (build 25.60-b23, mixed mode)]]></pre>
<h4>LOINC</h4>
<pre>./hapi-fhir-cli upload-terminology -d Downloads/LOINC_2.54_MULTI-AXIAL_HIERARCHY.zip -d Downloads/LOINC_2.54_Text.zip -f dstu3 -t http://localhost:8080/baseDstu3 -u http://loinc.org</pre>
</section>
<a name="migrate-database"/>
<section name="Migrate Database">
<p>
When upgrading the JPA server from one version of HAPI FHIR to a newer version,
often there will be changes to the database schema. The <b>Migrate Database</b>
command can be used to perform a migration from one version to the next.
</p>
<p>
Note that this feature was added in HAPI FHIR 3.5.0. It is not able to migrate
from versions prior to HAPI FHIR 3.4.0. <b>Please make a backup of your
database before running this command!</b>
</p>
<p>
The following example shows how to use the migrator utility to migrate between two versions.
</p>
<pre>./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u "jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_5_0</pre>
<p>
You may use the following command to get detailed help on the options:
</p>
<pre>./hapi-fhir-cli help migrate-database</pre>
<p>
Note the arguments:
<ul>
<li><code>-d [dialect]</code> - This indicates the database dialect to use. See the detailed help for a list of options</li>
<li><code>-f [version]</code> - The version to migrate from</li>
<li><code>-t [version]</code> - The version to migrate to</li>
</ul>
</p>
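<p>
The command also supports a dry-run mode via <code>-r</code> (<code>--dry-run</code>), which logs the SQL statements that would be executed without actually making any changes. For example:
</p>
<pre>./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u "jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -r -f V3_4_0 -t V3_5_0</pre>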
<subsection name="Oracle Support">
<p>
Note that the Oracle JDBC drivers are not distributed in the Maven Central repository,
so they are not included in HAPI FHIR. In order to use this command with an Oracle database,
you will need to invoke the CLI as follows:
</p>
<pre>java -cp hapi-fhir-cli.jar ca.uhn.fhir.cli.App migrate-database -d ORACLE_12C -u "[url]" -n "[username]" -p "[password]" -f V3_4_0 -t V3_5_0</pre>
</subsection>
</section>

View File

@@ -67,6 +67,67 @@
</section>
<section name="Announcements">
<p>
<b>Sep 9, 2018 - HAPI FHIR 3.5.0 Released</b> -
The next release of HAPI has now been uploaded to the Maven repos and
GitHub's releases section.
</p>
<p>
This release is happening a little later than we had hoped. It features
a complete reworking of the way that search indexes in the JPA server work, as well
as a new database migration tool that can be used to migrate an existing database to a new version of
HAPI FHIR. Testing these features ended up taking longer than expected, but
we think it will be worth the wait.
</p>
<p>
As always, see the
<a href="http://hapifhir.io/changes-report.html">changelog</a> for a full list
of changes. Notable changes include:
</p>
<ul>
<li>
HAPI FHIR now works correctly and is tested on Java 9 and 10, and continues
to support Java 8. Java 9 introduces a new module system that meant a lot of
additional testing and build tweaks were required in order to get HAPI FHIR
working on that platform. Future migrations do not look like they will be
as difficult, and we anticipate supporting current release versions of Java
as they come out.
</li>
<li>
A new database migration tool has been added to the HAPI FHIR CLI. This tool
allows administrators to automatically upgrade an existing database schema from
a previous version of HAPI FHIR (currently only HAPI FHIR 3.4.0 is supported)
to the latest version. See the
<a href="http://hapifhir.io/doc_cli.html#migrate-database">command documentation</a>
for more information.
</li>
<li>
The JPA server mechanism for indexing resources has been completely reworked to
index based on complex hashes of data. This change should have no user-visible
effects, but it does reduce the total size of the database and improve search
performance in many cases. It also lays the groundwork for some other features we have
planned, including native JPA server multitenancy and column-level encryption.
</li>
<li>
Performance in the JPA server has been significantly improved when fetching large
pages of data (e.g. search results or history operations containing 100+
resources in one page). This work comes thanks to a collaboration between
HAPI FHIR and the US National Institutes of Health (NIH).
</li>
<li>
Support for LOINC has been further improved, thanks to an ongoing collaboration
between HAPI FHIR and the Regenstrief Institute.
</li>
</ul>
<p>
Thanks to everyone who contributed to this release!
</p>
<p>
- <a href="https://github.com/jamesagnew/">James Agnew</a>
</p>
<br/><br/>
<p>
<b>May 28, 2018 - HAPI FHIR 3.4.0 Released</b> -
The next release of HAPI has now been uploaded to the Maven repos and