diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseMigrateDatabaseCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseMigrateDatabaseCommand.java
index f6e9f2569ab..2685989c610 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseMigrateDatabaseCommand.java
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseMigrateDatabaseCommand.java
@@ -9,9 +9,9 @@ package ca.uhn.fhir.cli;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -25,15 +25,24 @@ import ca.uhn.fhir.jpa.migrate.Migrator;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
+import org.apache.commons.lang3.StringUtils;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
+import java.util.Set;
import java.util.stream.Collectors;
+import static org.apache.commons.lang3.StringUtils.defaultString;
+
public abstract class BaseMigrateDatabaseCommand extends BaseCommand {
private static final String MIGRATE_DATABASE = "migrate-database";
+ private Set<String> myFlags;
+
+ protected Set<String> getFlags() {
+ return myFlags;
+ }
@Override
public String getCommandDescription() {
@@ -68,6 +77,7 @@ public abstract class BaseMigrateDatabaseCommand extends BaseCommand {
addRequiredOption(retVal, "f", "from", "Version", "The database schema version to migrate FROM");
addRequiredOption(retVal, "t", "to", "Version", "The database schema version to migrate TO");
addRequiredOption(retVal, "d", "driver", "Driver", "The database driver to use (Options are " + driverOptions() + ")");
+ addOptionalOption(retVal, "x", "flags", "Flags", "A comma-separated list of any specific migration flags (these flags are version specific, see migrator documentation for details)");
return retVal;
}
@@ -97,6 +107,12 @@ public abstract class BaseMigrateDatabaseCommand extends BaseCommand {
boolean dryRun = theCommandLine.hasOption("r");
+ String flags = theCommandLine.getOptionValue("x");
+ myFlags = Arrays.stream(defaultString(flags).split(","))
+ .map(String::trim)
+ .filter(StringUtils::isNotBlank)
+ .collect(Collectors.toSet());
+
Migrator migrator = new Migrator();
migrator.setConnectionUrl(url);
migrator.setDriverType(driverType);
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/HapiMigrateDatabaseCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/HapiMigrateDatabaseCommand.java
index ff7d39c0fda..6be5b241110 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/HapiMigrateDatabaseCommand.java
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/HapiMigrateDatabaseCommand.java
@@ -42,7 +42,7 @@ public class HapiMigrateDatabaseCommand extends BaseMigrateDatabaseCommand {
- List<BaseTask<?>> tasks = new HapiFhirJpaMigrationTasks().getTasks(theFrom, theTo);
+ List<BaseTask<?>> tasks = new HapiFhirJpaMigrationTasks(getFlags()).getTasks(theFrom, theTo);
tasks.forEach(theMigrator::addTask);
}
}
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/HapiMigrateDatabaseCommandTest.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/HapiMigrateDatabaseCommandTest.java
index 7051011ff58..aaee5c3e4aa 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/HapiMigrateDatabaseCommandTest.java
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/test/java/ca/uhn/fhir/cli/HapiMigrateDatabaseCommandTest.java
@@ -7,14 +7,26 @@ import org.apache.commons.io.IOUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.core.support.AbstractLobCreatingPreparedStatementCallback;
+import org.springframework.jdbc.support.lob.DefaultLobHandler;
+import org.springframework.jdbc.support.lob.LobCreator;
import java.io.File;
import java.io.IOException;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
+import java.util.Map;
import static org.apache.commons.lang3.StringUtils.isBlank;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
public class HapiMigrateDatabaseCommandTest {
@@ -25,39 +37,20 @@ public class HapiMigrateDatabaseCommandTest {
}
@Test
- public void testMigrate() throws IOException {
+ public void testMigrate_340_350() throws IOException {
File directory = new File("target/migrator_derby_test_340_350");
if (directory.exists()) {
FileUtils.deleteDirectory(directory);
}
- String url = "jdbc:derby:directory:target/migrator_derby_test_340_350;create=true";
+ String url = "jdbc:derby:directory:" + directory.getAbsolutePath() + ";create=true";
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(url, "", "");
- String script = IOUtils.toString(HapiMigrateDatabaseCommandTest.class.getResourceAsStream("/persistence_create_derby107_340.sql"), Charsets.UTF_8);
- List scriptStatements = new ArrayList<>(Arrays.asList(script.split("\n")));
- for (int i = 0; i < scriptStatements.size(); i++) {
- String nextStatement = scriptStatements.get(i);
- if (isBlank(nextStatement)) {
- scriptStatements.remove(i);
- i--;
- continue;
- }
+ String initSql = "/persistence_create_derby107_340.sql";
+ executeSqlStatements(connectionProperties, initSql);
- nextStatement = nextStatement.trim();
- while (nextStatement.endsWith(";")) {
- nextStatement = nextStatement.substring(0, nextStatement.length() - 1);
- }
- scriptStatements.set(i, nextStatement);
- }
-
- connectionProperties.getTxTemplate().execute(t -> {
- for (String next : scriptStatements) {
- connectionProperties.newJdbcTemplate().execute(next);
- }
- return null;
- });
+ seedDatabase340(connectionProperties);
ourLog.info("**********************************************");
ourLog.info("Done Setup, Starting Dry Run...");
@@ -75,6 +68,13 @@ public class HapiMigrateDatabaseCommandTest {
};
App.main(args);
+ connectionProperties.getTxTemplate().execute(t -> {
+ JdbcTemplate jdbcTemplate = connectionProperties.newJdbcTemplate();
+ List
+
+
+
+ Note that the Oracle JDBC drivers are not distributed in the Maven Central repository,
+ so they are not included in HAPI FHIR. In order to use this command with an Oracle database,
+ you will need to invoke the CLI as follows:
+
+ java -cp hapi-fhir-cli.jar ca.uhn.fhir.cli.App migrate-database -d ORACLE_12C -u "[url]" -n "[username]" -p "[password]" -f V3_4_0 -t V3_5_0
+
+
+
+
+ As of HAPI FHIR 3.5.0 a new mechanism for creating the JPA index tables (HFJ_SPIDX_xxx)
+ has been implemented. This new mechanism uses hashes in place of large multi-column
+ indexes. This improves both lookup times as well as required storage space. This change
+ also paves the way for future ability to provide efficient multi-tenant searches (which
+ is not yet implemented but is planned as an incremental improvement).
+
+
+ This change is not a lightweight change however, as it requires a rebuild of the
+ index tables in order to generate the hashes. This can take a long time on databases
+ that already have a large amount of data.
+
+
+ As a result, in HAPI FHIR JPA 3.6.0, an efficient way of upgrading existing databases
+ was added. Under this new scheme, columns for the hashes are added but values are not
+ calculated initially, database indexes are not modified on the HFJ_SPIDX_xxx tables,
+ and the previous columns are still used for searching as was the case in HAPI FHIR
+ JPA 3.4.0.
+
+
+ In order to perform a migration using this functionality, the following steps should
+ be followed:
+
+
+ -
+ Stop your running HAPI FHIR JPA instance (and remember to make a backup of your
+ database before proceeding with any changes!)
+
+ -
+ Modify your
DaoConfig
to specify that hash-based searches should not be used, using
+ the following setting:
+ myDaoConfig.setDisableHashBasedSearches(true);
+
+ -
+ Make sure that you have your JPA settings configured to not automatically
+ create database indexes and columns using the following setting
+ in your JPA Properties:
+ extraProperties.put("hibernate.hbm2ddl.auto", "none");
+
+ -
+ Run the database migrator command, including the entry
-x no-migrate-350-hashes
+ on the command line. For example:
+ ./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u "jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_6_0 -x no-migrate-350-hashes
+
+ -
+ Rebuild and start your HAPI FHIR JPA server. At this point you should have a working
+ HAPI FHIR JPA 3.6.0 server that is still using HAPI FHIR 3.4.0 search indexes. Search hashes
+ will be generated for any newly created or updated data but existing data will have null
+ hashes.
+
+ -
+ With the system running, request a complete reindex of the data in the database using
+ an HTTP request such as the following:
+ GET /$mark-all-resources-for-reindexing
+ Note that this is a custom operation built into the HAPI FHIR JPA server. It should
+ be secured in a real deployment, so Authentication is likely required for this
+ call.
+
+ -
+ You can track the reindexing process by watching your server logs,
+ but also by using the following SQL executed directly against your database:
+
+ SELECT * FROM HFJ_RES_REINDEX_JOB
+ When this query no longer returns any rows, the reindexing process is complete.
+
+ -
+ At this time, HAPI FHIR should be stopped once again in order to convert it
+ to using the hash based indexes.
+
+ -
+ Modify your
DaoConfig
to specify that hash-based searches are used, using
+ the following setting (this is the default setting, so it could also simply
+ be omitted):
+ myDaoConfig.setDisableHashBasedSearches(false);
+
+ -
+ Execute the migrator tool again, this time omitting the flag option, e.g.
+ ./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u "jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_6_0
+
+ -
+ Rebuild, and start HAPI FHIR JPA again.
+
+
+
+
+
+