Merge remote-tracking branch 'remotes/origin/master' into ks-20191119-scheduler

# Conflicts:
#	hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
Ken Stevens 2019-12-10 10:10:41 -05:00
commit 176bf657a7
33 changed files with 362 additions and 61 deletions


@ -24,7 +24,6 @@ import ca.uhn.fhir.jpa.migrate.BaseMigrator;
import ca.uhn.fhir.jpa.migrate.BruteForceMigrator;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.FlywayMigrator;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
@ -45,7 +44,9 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
public static final String MIGRATE_DATABASE = "migrate-database";
public static final String NO_COLUMN_SHRINK = "no-column-shrink";
public static final String DONT_USE_FLYWAY = "dont-use-flyway";
public static final String OUT_OF_ORDER_PERMITTED = "out-of-order-permitted";
private Set<String> myFlags;
private String myMigrationTableName;
@ -86,7 +87,8 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
addRequiredOption(retVal, "d", "driver", "Driver", "The database driver to use (Options are " + driverOptions() + ")");
addOptionalOption(retVal, "x", "flags", "Flags", "A comma-separated list of any specific migration flags (these flags are version specific, see migrator documentation for details)");
addOptionalOption(retVal, null, DONT_USE_FLYWAY, false, "If this option is set, the migrator will not use FlywayDB for migration. This setting should only be used if you are trying to migrate a legacy database platform that is not supported by FlywayDB.");
addOptionalOption(retVal, null, "no-column-shrink", false, "If this flag is set, the system will not attempt to reduce the length of columns. This is useful in environments with a lot of existing data, where shrinking a column can take a very long time.");
addOptionalOption(retVal, null, OUT_OF_ORDER_PERMITTED, false, "If this option is set, the migrator will permit migration tasks to be run out of order. It should not be required in most cases, but it may be the solution if you see the error message 'Detected resolved migration not applied to database'.");
addOptionalOption(retVal, null, NO_COLUMN_SHRINK, false, "If this flag is set, the system will not attempt to reduce the length of columns. This is useful in environments with a lot of existing data, where shrinking a column can take a very long time.");
return retVal;
}
@ -110,7 +112,7 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
}
boolean dryRun = theCommandLine.hasOption("r");
boolean noColumnShrink = theCommandLine.hasOption("no-column-shrink");
boolean noColumnShrink = theCommandLine.hasOption(BaseFlywayMigrateDatabaseCommand.NO_COLUMN_SHRINK);
String flags = theCommandLine.getOptionValue("x");
myFlags = Arrays.stream(defaultString(flags).split(","))
@ -118,7 +120,8 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
.filter(StringUtils::isNotBlank)
.collect(Collectors.toSet());
boolean dontUseFlyway = theCommandLine.hasOption("dont-use-flyway");
boolean dontUseFlyway = theCommandLine.hasOption(BaseFlywayMigrateDatabaseCommand.DONT_USE_FLYWAY);
boolean outOfOrderPermitted = theCommandLine.hasOption(BaseFlywayMigrateDatabaseCommand.OUT_OF_ORDER_PERMITTED);
BaseMigrator migrator;
if (dontUseFlyway) {
@ -132,6 +135,7 @@ public abstract class BaseFlywayMigrateDatabaseCommand<T extends Enum> extends B
migrator.setPassword(password);
migrator.setDryRun(dryRun);
migrator.setNoColumnShrink(noColumnShrink);
migrator.setOutOfOrderPermitted(outOfOrderPermitted);
addTasks(migrator);
migrator.migrate();
}
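The new option ultimately just flips Flyway's `outOfOrder` setting, and the migrator now also calls `repair()` before migrating (see the FlywayMigrator hunks below). Stripped of the surrounding plumbing, the Flyway calls look roughly like this sketch; the table name and connection settings are placeholders, and the `javaMigrations(...)` registration is omitted:

```java
import org.flywaydb.core.Flyway;

public class FlywayOutOfOrderSketch {
	public static void main(String[] args) {
		Flyway flyway = Flyway.configure()
			.table("FLY_HFJ_MIGRATION")                                      // placeholder migration table name
			.dataSource("jdbc:h2:file:./target/jpaserver_h2_files", "", "")  // placeholder URL/credentials
			.baselineOnMigrate(true)
			.outOfOrder(true)   // what the out-of-order-permitted option ultimately toggles
			.load();
		flyway.repair();        // clear any failed migration entries first
		flyway.migrate();
	}
}
```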


@ -6,10 +6,10 @@ When upgrading the JPA server from one version of HAPI FHIR to a newer version,
Note that this feature was added in HAPI FHIR 3.5.0. It is not able to migrate from versions prior to HAPI FHIR 3.4.0. **Please make a backup of your database before running this command!**
The following example shows how to use the migrator utility to migrate between two versions.
The following example shows how to use the migrator utility to migrate to the latest version.
```bash
./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u "jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_5_0
./hapi-fhir-cli migrate-database -d H2_EMBEDDED -u "jdbc:h2:directory:target/jpaserver_h2_files;create=true" -n "" -p ""
```
You may use the following command to get detailed help on the options:
@ -21,15 +21,13 @@ You may use the following command to get detailed help on the options:
Note the arguments:
* `-d [dialect]` &ndash; This indicates the database dialect to use. See the detailed help for a list of options
* `-f [version]` &ndash; The version to migrate from
* `-t [version]` &ndash; The version to migrate to
# Oracle Support
Note that the Oracle JDBC drivers are not distributed in the Maven Central repository, so they are not included in HAPI FHIR. In order to use this command with an Oracle database, you will need to invoke the CLI as follows:
```bash
java -cp hapi-fhir-cli.jar ca.uhn.fhir.cli.App migrate-database -d ORACLE_12C -u "[url]" -n "[username]" -p "[password]" -f V3_4_0 -t V3_5_0
java -cp hapi-fhir-cli.jar ca.uhn.fhir.cli.App migrate-database -d ORACLE_12C -u "[url]" -n "[username]" -p "[password]"
```
## Migrating 3.4.0 to 3.5.0+
@ -48,7 +46,7 @@ In order to perform a migration using this functionality, the following steps sh
* Run the database migrator command, including the entry `-x no-migrate-350-hashes` on the command line. For example:
```
./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u "jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_6_0 -x no-migrate-350-hashes
./hapi-fhir-cli migrate-database -d H2_EMBEDDED -u "jdbc:h2:directory:target/jpaserver_h2_files;create=true" -n "" -p "" -x no-migrate-350-hashes
```
* Rebuild and start your HAPI FHIR JPA server. At this point you should have a working HAPI FHIR JPA 3.6.0 server that is still using HAPI FHIR 3.4.0 search indexes. Search hashes will be generated for any newly created or updated data, but existing data will have null hashes.
@ -66,6 +64,10 @@ SELECT * FROM HFJ_RES_REINDEX_JOB
* Execute the migrator tool again, this time omitting the flag option, e.g.
```bash
./hapi-fhir-cli migrate-database -d DERBY_EMBEDDED -u "jdbc:derby:directory:target/jpaserver_derby_files;create=true" -n "" -p "" -f V3_4_0 -t V3_6_0
./hapi-fhir-cli migrate-database -d H2_EMBEDDED -u "jdbc:h2:directory:target/jpaserver_h2_files;create=true" -n "" -p ""
```
* Rebuild and start HAPI FHIR JPA again.
# Flyway
As of version 4.2.0, HAPI FHIR JPA uses Flyway for schema migrations, and the "from" and "to" parameters are no longer used. Flyway maintains a list of completed migrations in a table called `FLY_HFJ_MIGRATION`. When you run the migration command, Flyway compares the completed migrations recorded in this table against the full list of known migrations and runs only the new ones.
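If you need to see which migrations Flyway has already recorded (for example before re-running an upgrade that was interrupted), the table can be inspected with plain JDBC. A minimal sketch; the H2 URL is illustrative and the credentials should match the `-u`/`-n`/`-p` values you pass to `migrate-database`:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;

public class DumpMigrationHistory {
	public static void main(String[] args) throws Exception {
		try (Connection conn = DriverManager.getConnection("jdbc:h2:file:./target/jpaserver_h2_files", "", "");
			 Statement stmt = conn.createStatement();
			 ResultSet rs = stmt.executeQuery("SELECT * FROM FLY_HFJ_MIGRATION")) {
			ResultSetMetaData meta = rs.getMetaData();
			while (rs.next()) {
				StringBuilder row = new StringBuilder();
				for (int i = 1; i <= meta.getColumnCount(); i++) {
					row.append(meta.getColumnName(i)).append('=').append(rs.getString(i)).append("  ");
				}
				System.out.println(row); // one line per completed migration
			}
		}
	}
}
```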


@ -30,6 +30,7 @@ public abstract class BaseMigrator {
private boolean myDryRun;
private boolean myNoColumnShrink;
private boolean myOutOfOrderPermitted;
private DriverTypeEnum myDriverType;
private String myConnectionUrl;
private String myUsername;
@ -89,4 +90,11 @@ public abstract class BaseMigrator {
myPassword = thePassword;
}
public boolean isOutOfOrderPermitted() {
return myOutOfOrderPermitted;
}
public void setOutOfOrderPermitted(boolean theOutOfOrderPermitted) {
myOutOfOrderPermitted = theOutOfOrderPermitted;
}
}


@ -70,6 +70,7 @@ public class FlywayMigrator extends BaseMigrator {
public void migrate() {
try (DriverTypeEnum.ConnectionProperties connectionProperties = getDriverType().newConnectionProperties(getConnectionUrl(), getUsername(), getPassword())) {
Flyway flyway = initFlyway(connectionProperties);
flyway.repair();
flyway.migrate();
} catch (Exception e) {
throw e;
@ -82,6 +83,7 @@ public class FlywayMigrator extends BaseMigrator {
.table(myMigrationTableName)
.dataSource(getConnectionUrl(), getUsername(), getPassword())
.baselineOnMigrate(true)
.outOfOrder(isOutOfOrderPermitted())
.javaMigrations(myTasks.toArray(new JavaMigration[0]))
.load();
for (FlywayMigration task : myTasks) {


@ -43,6 +43,7 @@ public class SchemaMigrator {
private final String myMigrationTableName;
private final List<BaseTask<?>> myMigrationTasks;
private boolean myDontUseFlyway;
private boolean myOutOfOrderPermitted;
private DriverTypeEnum myDriverType;
/**
@ -64,6 +65,10 @@ public class SchemaMigrator {
myDontUseFlyway = theDontUseFlyway;
}
public void setOutOfOrderPermitted(boolean theOutOfOrderPermitted) {
myOutOfOrderPermitted = theOutOfOrderPermitted;
}
public void validate() {
if (mySkipValidation) {
ourLog.warn("Database running in hibernate auto-update mode. Skipping schema validation.");
@ -102,6 +107,7 @@ public class SchemaMigrator {
migrator.setPassword(myDataSource.getPassword());
} else {
migrator = new FlywayMigrator(myMigrationTableName, myDataSource);
migrator.setOutOfOrderPermitted(myOutOfOrderPermitted);
}
migrator.addTasks(myMigrationTasks);
return migrator;
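The new setter lets a server that embeds `SchemaMigrator` opt in to out-of-order execution. A rough sketch built from the constructor and setters exercised in the tests later in this commit; the DBCP2 `BasicDataSource`, version strings and table definition are illustrative assumptions:

```java
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import ca.uhn.fhir.jpa.migrate.taskdef.AddTableRawSqlTask;
import com.google.common.collect.ImmutableList;
import org.apache.commons.dbcp2.BasicDataSource;

import java.util.Properties;

public class SchemaMigratorSketch {
	public static void main(String[] args) {
		BasicDataSource dataSource = new BasicDataSource();
		dataSource.setUrl("jdbc:h2:file:./target/jpaserver_h2_files"); // illustrative connection settings
		dataSource.setUsername("");
		dataSource.setPassword("");

		// One raw-SQL task, mirroring the pattern used in SchemaMigratorTest below.
		AddTableRawSqlTask task = new AddTableRawSqlTask("4.2.0", "20191210.1"); // placeholder versions
		task.setTableName("SOMETABLE");
		task.addSql(DriverTypeEnum.H2_EMBEDDED, "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");

		SchemaMigrator migrator = new SchemaMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME,
			dataSource, new Properties(), ImmutableList.of(task));
		migrator.setDriverType(DriverTypeEnum.H2_EMBEDDED);
		migrator.setOutOfOrderPermitted(true); // forwarded to FlywayMigrator and from there to Flyway's outOfOrder
		migrator.migrate();
	}
}
```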


@ -42,7 +42,7 @@ public class AddColumnTask extends BaseTableColumnTypeTask<AddColumnTask> {
}
@Override
public void execute() throws SQLException {
public void doExecute() throws SQLException {
Set<String> columnNames = JdbcUtils.getColumnNames(getConnectionProperties(), getTableName());
if (columnNames.contains(getColumnName())) {
logInfo(ourLog, "Column {} already exists on table {} - No action performed", getColumnName(), getTableName());


@ -66,7 +66,7 @@ public class AddForeignKeyTask extends BaseTableColumnTask<AddForeignKeyTask> {
}
@Override
public void execute() throws SQLException {
public void doExecute() throws SQLException {
Set<String> existing = JdbcUtils.getForeignKeys(getConnectionProperties(), myForeignTableName, getTableName());
if (existing.contains(myConstraintName)) {


@ -50,7 +50,7 @@ public class AddIdGeneratorTask extends BaseTask<AddIdGeneratorTask> {
}
@Override
public void execute() throws SQLException {
public void doExecute() throws SQLException {
Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
String sql = null;


@ -67,7 +67,7 @@ public class AddIndexTask extends BaseTableTask<AddIndexTask> {
}
@Override
public void execute() throws SQLException {
public void doExecute() throws SQLException {
Set<String> indexNames = JdbcUtils.getIndexNames(getConnectionProperties(), getTableName());
if (indexNames.contains(myIndexName)) {
logInfo(ourLog, "Index {} already exists on table {} - No action performed", myIndexName, getTableName());


@ -58,7 +58,7 @@ public class AddTableByColumnTask extends BaseTableTask<AddTableByColumnTask> {
}
@Override
public void execute() throws SQLException {
public void doExecute() throws SQLException {
if (JdbcUtils.getTableNames(getConnectionProperties()).contains(getTableName())) {
logInfo(ourLog, "Already have table named {} - No action performed", getTableName());


@ -58,7 +58,7 @@ public class AddTableRawSqlTask extends BaseTableTask<AddTableRawSqlTask> {
}
@Override
public void execute() throws SQLException {
public void doExecute() throws SQLException {
Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
if (tableNames.contains(getTableName())) {
logInfo(ourLog, "Table {} already exists - No action performed", getTableName());


@ -63,7 +63,7 @@ public class ArbitrarySqlTask extends BaseTask<ArbitrarySqlTask> {
}
@Override
public void execute() throws SQLException {
public void doExecute() throws SQLException {
logInfo(ourLog, "Starting: {}", myDescription);
if (StringUtils.isNotBlank(myExecuteOnlyIfTableExists)) {


@ -25,7 +25,6 @@ import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.support.TransactionTemplate;
@ -47,6 +46,7 @@ public abstract class BaseTask<T extends BaseTask> {
private String myDescription;
private int myChangesCount;
private boolean myDryRun;
private boolean myDoNothing;
private List<ExecutedStatement> myExecutedStatements = new ArrayList<>();
private boolean myNoColumnShrink;
private boolean myFailureAllowed;
@ -155,7 +155,15 @@ public abstract class BaseTask<T extends BaseTask> {
return getConnectionProperties().newJdbcTemplate();
}
public abstract void execute() throws SQLException;
public void execute() throws SQLException {
if (myDoNothing) {
ourLog.info("Skipping stubbed task: {}", getDescription());
return;
}
doExecute();
}
public abstract void doExecute() throws SQLException;
public void setFailureAllowed(boolean theFailureAllowed) {
myFailureAllowed = theFailureAllowed;
@ -180,6 +188,15 @@ public abstract class BaseTask<T extends BaseTask> {
}
}
public boolean isDoNothing() {
return myDoNothing;
}
public BaseTask<T> setDoNothing(boolean theDoNothing) {
myDoNothing = theDoNothing;
return this;
}
public static class ExecutedStatement {
private final String mySql;
private final List<Object> myArguments;

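The `execute()`/`doExecute()` split above turns `execute()` into a template method: a task marked with `setDoNothing(true)` only logs and returns, which is what the new `dropIndexStub` and `withColumnsStub` builder methods later in this commit rely on. A small sketch of that behaviour (version strings and names are placeholders):

```java
import ca.uhn.fhir.jpa.migrate.taskdef.DropIndexTask;

public class StubbedTaskSketch {
	public static void main(String[] args) throws Exception {
		DropIndexTask task = new DropIndexTask("4.2.0", "20191210.2"); // placeholder product/schema versions
		task.setTableName("HFJ_RESOURCE");
		task.setIndexName("IDX_EXAMPLE");
		task.setDoNothing(true);

		// execute() sees the doNothing flag, logs "Skipping stubbed task: ..." and returns
		// without calling doExecute(), so no database connection is needed here.
		task.execute();
	}
}
```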

@ -58,7 +58,7 @@ public class CalculateHashesTask extends BaseTableColumnTask<CalculateHashesTask
}
@Override
public synchronized void execute() throws SQLException {
public synchronized void doExecute() throws SQLException {
if (isDryRun()) {
return;
}


@ -43,7 +43,7 @@ public class DropColumnTask extends BaseTableColumnTask<DropColumnTask> {
}
@Override
public void execute() throws SQLException {
public void doExecute() throws SQLException {
Set<String> columnNames = JdbcUtils.getColumnNames(getConnectionProperties(), getTableName());
if (!columnNames.contains(getColumnName())) {
logInfo(ourLog, "Column {} does not exist on table {} - No action performed", getColumnName(), getTableName());


@ -65,7 +65,7 @@ public class DropForeignKeyTask extends BaseTableTask<DropForeignKeyTask> {
}
@Override
public void execute() throws SQLException {
public void doExecute() throws SQLException {
Set<String> existing = JdbcUtils.getForeignKeys(getConnectionProperties(), myParentTableName, getTableName());
if (!existing.contains(myConstraintName)) {


@ -50,7 +50,7 @@ public class DropIdGeneratorTask extends BaseTask<DropIdGeneratorTask> {
}
@Override
public void execute() throws SQLException {
public void doExecute() throws SQLException {
Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
String sql = null;


@ -53,7 +53,7 @@ public class DropIndexTask extends BaseTableTask<DropIndexTask> {
}
@Override
public void execute() throws SQLException {
public void doExecute() throws SQLException {
Set<String> indexNames = JdbcUtils.getIndexNames(getConnectionProperties(), getTableName());
if (!indexNames.contains(myIndexName)) {


@ -44,7 +44,7 @@ public class DropTableTask extends BaseTableTask<DropTableTask> {
}
@Override
public void execute() throws SQLException {
public void doExecute() throws SQLException {
Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
if (!tableNames.contains(getTableName())) {
return;


@ -67,7 +67,7 @@ public class ExecuteRawSqlTask extends BaseTask<ExecuteRawSqlTask> {
}
@Override
public void execute() {
public void doExecute() {
List<String> sqlStatements = myDriverToSqls.computeIfAbsent(getDriverType(), t -> new ArrayList<>());
sqlStatements.addAll(myDriverNeutralSqls);


@ -48,7 +48,7 @@ public class InitializeSchemaTask extends BaseTask<InitializeSchemaTask> {
}
@Override
public void execute() throws SQLException {
public void doExecute() throws SQLException {
DriverTypeEnum driverType = getDriverType();
Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());


@ -43,7 +43,7 @@ public class ModifyColumnTask extends BaseTableColumnTypeTask<ModifyColumnTask>
}
@Override
public void execute() throws SQLException {
public void doExecute() throws SQLException {
JdbcUtils.ColumnType existingType;
boolean nullable;


@ -65,7 +65,7 @@ public class RenameColumnTask extends BaseTableTask<RenameColumnTask> {
}
@Override
public void execute() throws SQLException {
public void doExecute() throws SQLException {
Set<String> columnNames = JdbcUtils.getColumnNames(getConnectionProperties(), getTableName());
boolean haveOldName = columnNames.contains(myOldName.toUpperCase());
boolean haveNewName = columnNames.contains(myNewName.toUpperCase());


@ -0,0 +1,137 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
/*-
* #%L
* HAPI FHIR JPA Server - Migration
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
public class RenameIndexTask extends BaseTableTask<RenameIndexTask> {
private static final Logger ourLog = LoggerFactory.getLogger(RenameIndexTask.class);
private String myOldIndexName;
private String myNewIndexName;
public RenameIndexTask(String theProductVersion, String theSchemaVersion) {
super(theProductVersion, theSchemaVersion);
}
@Override
public void validate() {
super.validate();
Validate.notBlank(myOldIndexName, "The old index name must not be blank");
Validate.notBlank(myNewIndexName, "The new index name must not be blank");
setDescription("Rename index from " + myOldIndexName + " to " + myNewIndexName + " on table " + getTableName());
}
@Override
public void doExecute() throws SQLException {
Set<String> indexNames = JdbcUtils.getIndexNames(getConnectionProperties(), getTableName());
if (!indexNames.contains(myOldIndexName)) {
logInfo(ourLog, "Index {} does not exist on table {} - No action needed", myOldIndexName, getTableName());
return;
}
List<String> sqls = createRenameIndexSql(getConnectionProperties(), getTableName(), myOldIndexName, myNewIndexName, getDriverType());
if (!sqls.isEmpty()) {
logInfo(ourLog, "Renaming index from {} to {} on table {}", myOldIndexName, myNewIndexName, getTableName());
}
for (@Language("SQL") String sql : sqls) {
executeSql(getTableName(), sql);
}
}
public RenameIndexTask setNewIndexName(String theNewIndexName) {
myNewIndexName = theNewIndexName;
return this;
}
public RenameIndexTask setOldIndexName(String theOldIndexName) {
myOldIndexName = theOldIndexName;
return this;
}
static List<String> createRenameIndexSql(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theOldIndexName, String theNewIndexName, DriverTypeEnum theDriverType) throws SQLException {
Validate.notBlank(theOldIndexName, "theOldIndexName must not be blank");
Validate.notBlank(theNewIndexName, "theNewIndexName must not be blank");
Validate.notBlank(theTableName, "theTableName must not be blank");
if (!JdbcUtils.getIndexNames(theConnectionProperties, theTableName).contains(theOldIndexName)) {
return Collections.emptyList();
}
List<String> sql = new ArrayList<>();
// Build the dialect-specific rename statement
switch (theDriverType) {
case MYSQL_5_7:
case MARIADB_10_1:
case DERBY_EMBEDDED:
sql.add("rename index " + theOldIndexName + " to " + theNewIndexName);
break;
case H2_EMBEDDED:
case POSTGRES_9_4:
case ORACLE_12C:
sql.add("alter index " + theOldIndexName + " rename to " + theNewIndexName);
break;
case MSSQL_2012:
sql.add("EXEC sp_rename '" + theTableName + "." + theOldIndexName + "', '" + theNewIndexName + "'");
break;
}
return sql;
}
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (theO == null || getClass() != theO.getClass()) return false;
RenameIndexTask that = (RenameIndexTask) theO;
return new EqualsBuilder()
.appendSuper(super.equals(theO))
.append(myOldIndexName, that.myOldIndexName)
.append(myNewIndexName, that.myNewIndexName)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.appendSuper(super.hashCode())
.append(myOldIndexName)
.append(myNewIndexName)
.toHashCode();
}
}


@ -63,7 +63,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
Builder version = forVersion(VersionEnum.V4_2_0);
}
protected void init410() { // 20190815 - present
protected void init410() { // 20190815 - 20191014
Builder version = forVersion(VersionEnum.V4_1_0);
/*
@ -168,12 +168,12 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
.withColumns("VALUESET_PID", "VALUESET_ORDER");
// Account for RESTYPE_LEN column increasing from 30 to 40
version.onTable("HFJ_RESOURCE").modifyColumn("20191002.1", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_RES_VER").modifyColumn("20191002.2", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_HISTORY_TAG").modifyColumn("20191002.3", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_RES_LINK").modifyColumn("20191002.4", "SOURCE_RESOURCE_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_RES_LINK").modifyColumn("20191002.5", "TARGET_RESOURCE_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_RES_TAG").modifyColumn("20191002.6", "RES_TYPE").nonNullable().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_RESOURCE").modifyColumn("20191002.1", "RES_TYPE").nonNullable().failureAllowed().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_RES_VER").modifyColumn("20191002.2", "RES_TYPE").nonNullable().failureAllowed().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_HISTORY_TAG").modifyColumn("20191002.3", "RES_TYPE").nonNullable().failureAllowed().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_RES_LINK").modifyColumn("20191002.4", "SOURCE_RESOURCE_TYPE").nonNullable().failureAllowed().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_RES_LINK").modifyColumn("20191002.5", "TARGET_RESOURCE_TYPE").nonNullable().failureAllowed().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
version.onTable("HFJ_RES_TAG").modifyColumn("20191002.6", "RES_TYPE").nonNullable().failureAllowed().withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 40);
// TermConceptDesignation
version.startSectionWithMessage("Processing table: TRM_CONCEPT_DESIG");


@ -154,9 +154,26 @@ public class Builder {
}
public void dropIndex(String theVersion, String theIndexName) {
dropIndexOptional(false, theVersion, theIndexName);
}
public void dropIndexStub(String theVersion, String theIndexName) {
dropIndexOptional(true, theVersion, theIndexName);
}
private void dropIndexOptional(boolean theDoNothing, String theVersion, String theIndexName) {
DropIndexTask task = new DropIndexTask(myRelease, theVersion);
task.setIndexName(theIndexName);
task.setTableName(myTableName);
task.setDoNothing(theDoNothing);
addTask(task);
}
public void renameIndex(String theVersion, String theOldIndexName, String theNewIndexName) {
RenameIndexTask task = new RenameIndexTask(myRelease, theVersion);
task.setOldIndexName(theOldIndexName);
task.setNewIndexName(theNewIndexName);
task.setTableName(myTableName);
addTask(task);
}
@ -251,12 +268,21 @@ public class Builder {
myUnique = theUnique;
}
public void withColumnsStub(String... theColumnNames) {
withColumnsOptional(true, theColumnNames);
}
public void withColumns(String... theColumnNames) {
withColumnsOptional(false, theColumnNames);
}
private void withColumnsOptional(boolean theDoNothing, String... theColumnNames) {
AddIndexTask task = new AddIndexTask(myRelease, myVersion);
task.setTableName(myTableName);
task.setIndexName(myIndexName);
task.setUnique(myUnique);
task.setColumns(theColumnNames);
task.setDoNothing(theDoNothing);
addTask(task);
}
}


@ -4,37 +4,30 @@ import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.jpa.migrate.taskdef.AddTableRawSqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTest;
import com.google.common.collect.ImmutableList;
import org.flywaydb.core.api.FlywayException;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.ArgumentMatchers;
import javax.annotation.Nonnull;
import java.sql.SQLException;
import java.util.Collections;
import java.util.Properties;
import java.util.Set;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.contains;
import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.*;
public class SchemaMigratorTest extends BaseTest {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SchemaMigratorTest.class);
@Test
public void testMigrationRequired() {
AddTableRawSqlTask task = new AddTableRawSqlTask("1", "1");
task.setTableName("SOMETABLE");
task.addSql(DriverTypeEnum.H2_EMBEDDED, "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
SchemaMigrator schemaMigrator = new SchemaMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), ImmutableList.of(task));
schemaMigrator.setDriverType(DriverTypeEnum.H2_EMBEDDED);
SchemaMigrator schemaMigrator = createTableMigrator();
try {
schemaMigrator.validate();
fail();
} catch (ConfigurationException e) {
assertEquals("The database schema for " + getUrl() + " is out of date. Current database schema version is unknown. Schema version required by application is " + task.getFlywayVersion() + ". Please run the database migrator.", e.getMessage());
assertEquals("The database schema for " + getUrl() + " is out of date. Current database schema version is unknown. Schema version required by application is 1.1. Please run the database migrator.", e.getMessage());
}
schemaMigrator.migrate();
@ -44,12 +37,39 @@ public class SchemaMigratorTest extends BaseTest {
@Test
public void testMigrationRequiredNoFlyway() throws SQLException {
AddTableRawSqlTask task = new AddTableRawSqlTask("1", "1");
task.setTableName("SOMETABLE");
task.addSql(DriverTypeEnum.H2_EMBEDDED, "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
public void testRepairFailedMigration() {
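// The SQL below is deliberately invalid ("create fable") so the first migration fails and leaves a failed Flyway entry for repair() to clean up.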
SchemaMigrator schemaMigrator = createSchemaMigrator("SOMETABLE", "create fable SOMETABLE (PID bigint not null, TEXTCOL varchar(255))", "1");
try {
schemaMigrator.migrate();
fail();
} catch (FlywayException e) {
assertEquals(org.springframework.jdbc.BadSqlGrammarException.class, e.getCause().getCause().getClass());
}
schemaMigrator = createTableMigrator();
schemaMigrator.migrate();
}
SchemaMigrator schemaMigrator = new SchemaMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), ImmutableList.of(task));
@Test
public void testOutOfOrderMigration() {
SchemaMigrator schemaMigrator = createSchemaMigrator("SOMETABLE", "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))", "2");
schemaMigrator.migrate();
schemaMigrator = createSchemaMigrator("SOMETABLE" ,"create table SOMEOTHERTABLE (PID bigint not null, TEXTCOL varchar(255))", "1");
try {
schemaMigrator.migrate();
fail();
} catch (FlywayException e) {
assertThat(e.getMessage(), containsString("Detected resolved migration not applied to database: 1.1"));
}
schemaMigrator.setOutOfOrderPermitted(true);
schemaMigrator.migrate();
}
@Test
public void testMigrationRequiredNoFlyway() throws SQLException {
SchemaMigrator schemaMigrator = createTableMigrator();
schemaMigrator.setDriverType(DriverTypeEnum.H2_EMBEDDED);
schemaMigrator.setDontUseFlyway(true);
@ -66,4 +86,18 @@ public class SchemaMigratorTest extends BaseTest {
}
@Nonnull
private SchemaMigrator createTableMigrator() {
return createSchemaMigrator("SOMETABLE", "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))", "1");
}
@Nonnull
private SchemaMigrator createSchemaMigrator(String theTableName, String theSql, String theSchemaVersion) {
AddTableRawSqlTask task = new AddTableRawSqlTask("1", theSchemaVersion);
task.setTableName(theTableName);
task.addSql(DriverTypeEnum.H2_EMBEDDED, theSql);
SchemaMigrator retval = new SchemaMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, getDataSource(), new Properties(), ImmutableList.of(task));
retval.setDriverType(DriverTypeEnum.H2_EMBEDDED);
return retval;
}
}


@ -7,12 +7,13 @@ import ca.uhn.fhir.util.VersionEnum;
import org.junit.Test;
import java.sql.SQLException;
import java.util.Set;
import java.util.stream.Collectors;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertThat;
public class AddTableByColumnTaskTest extends BaseTest {
@Test
public void testAddTable() throws SQLException {
@ -21,9 +22,14 @@ public class AddTableByColumnTaskTest extends BaseTest {
getMigrator().migrate();
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), containsInAnyOrder("FOO_TABLE", "TGT_TABLE"));
Set<String> indexes = JdbcUtils.getIndexNames(getConnectionProperties(), "FOO_TABLE")
.stream()
.filter(s -> !s.startsWith("FK_REF_INDEX_"))
.filter(s -> !s.startsWith("PRIMARY_KEY_"))
.collect(Collectors.toSet());
assertThat(indexes, containsInAnyOrder("IDX_BONJOUR"));
}
private static class MyMigrationTasks extends BaseMigrationTasks<VersionEnum> {
public MyMigrationTasks() {
Builder v = forVersion(VersionEnum.V3_5_0);
@ -34,10 +40,15 @@ public class AddTableByColumnTaskTest extends BaseTest {
Builder.BuilderAddTableByColumns fooTable = v.addTableByColumns("3", "FOO_TABLE", "PID");
fooTable.addColumn("PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
fooTable.addColumn("HELLO").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
fooTable.addColumn("GOODBYE").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
fooTable.addColumn("COL_REF").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
fooTable.addIndex("4", "IDX_HELLO").unique(true).withColumns("HELLO");
fooTable.addForeignKey("5", "FK_REF").toColumn("COL_REF").references("TGT_TABLE", "PID");
fooTable.addIndex("5", "IDX_GOODBYE").unique(true).withColumnsStub("GOODBYE");
fooTable.dropIndexStub("6", "IDX_HELLO");
fooTable.addForeignKey("7", "FK_REF").toColumn("COL_REF").references("TGT_TABLE", "PID");
Builder.BuilderWithTableName renameIndexTable = v.onTable("FOO_TABLE");
renameIndexTable.renameIndex("8", "IDX_HELLO", "IDX_BONJOUR");
}
}
}


@ -423,7 +423,7 @@ public class AuthorizationInterceptor implements IRuleApplier {
private final IAuthRule myDecidingRule;
private final PolicyEnum myDecision;
Verdict(PolicyEnum theDecision, IAuthRule theDecidingRule) {
public Verdict(PolicyEnum theDecision, IAuthRule theDecidingRule) {
Validate.notNull(theDecision);
myDecision = theDecision;


@ -0,0 +1,48 @@
package ca.uhn.fhir.rest.server;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationFlagsEnum;
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor.Verdict;
import ca.uhn.fhir.rest.server.interceptor.auth.IAuthRule;
import ca.uhn.fhir.rest.server.interceptor.auth.IRuleApplier;
import ca.uhn.fhir.rest.server.interceptor.auth.PolicyEnum;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.Test;
import java.util.Set;
/**
* Tests for {@link Verdict}
*
* @author Jafer Khan Shamshad
*/
public class VerdictTest {
/**
* Implementers should be able to instantiate {@link Verdict} outside the package where it has been defined.
*/
@Test
public void testInstantiationFromAnotherPackage() {
Verdict verdict = new Verdict(PolicyEnum.ALLOW, new CustomRule());
}
/**
* Existing implementations of {@link IAuthRule} are inaccessible from this package.
* This test class is a sample implementation of {@link IAuthRule}.
*/
public static class CustomRule implements IAuthRule {
@Override
public Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier, Set<AuthorizationFlagsEnum> theFlags, Pointcut thePointcut) {
return new Verdict(PolicyEnum.ALLOW, this);
}
@Override
public String getName() {
return "Custom rule";
}
}
}


@ -1,9 +1,11 @@
package ca.uhn.fhir.rest.server.interceptor.auth;
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor.Verdict;
import org.junit.Test;
public class VerdictTest {
@Test
public void testToString() {
Verdict v = new AuthorizationInterceptor.Verdict(PolicyEnum.ALLOW, new RuleImplOp("foo"));
v.toString();


@ -620,7 +620,7 @@
<!-- 9.4.17 seems to have issues -->
<jetty_version>9.4.24.v20191120</jetty_version>
<jsr305_version>3.0.2</jsr305_version>
<flyway_version>6.0.8</flyway_version>
<flyway_version>6.1.0</flyway_version>
<!--<hibernate_version>5.2.10.Final</hibernate_version>-->
<hibernate_version>5.4.6.Final</hibernate_version>
<!-- Update lucene version when you update hibernate-search version -->


@ -54,6 +54,10 @@
A ConcurrentModificationException was sometimes thrown when performing a cascading delete.
This has been corrected.
</action>
<action type="fix" issue="1624">
The constructor for Verdict.java was inadvertently made private, preventing custom
rules from being written. Thanks to Jafer Khan for the pull request!
</action>
</release>
<release version="4.1.0" date="2019-11-13" description="Jitterbug">
<action type="add">