Merge remote-tracking branch 'remotes/origin/master' into add-pid-to-created-resource

Ken Stevens 2019-07-22 17:02:41 -04:00
commit a68addf89a
18 changed files with 410 additions and 141 deletions

View File

@@ -17,12 +17,12 @@
<th:block th:case="'DSTU1'">
<th:block th:if="${not resource.name.textElement.empty}" th:text="${resource.name.textElement.value}"/>
<th:block th:if=" ${resource.name.textElement.empty} and ${not resource.name.coding[0].displayElement.empty}" th:text="${resource.name.coding[0].display}"/>
<th:block th:if= "${resource.name.textElement.empty} and ${resource.name.coding[0].displayElement.empty}" th:text="Untitled Diagnostic Report"/>
<th:block th:if= "${resource.name.textElement.empty} and ${resource.name.coding[0].displayElement.empty}" th:text="'Untitled Diagnostic Report'"/>
</th:block>
<th:block th:case="*">
<th:block th:if="${not resource.code.textElement.empty} or ${resource.code.coding.empty}" th:text="${resource.code.textElement.value}"/>
<th:block th:if="${not resource.code.coding.empty} and ${resource.code.textElement.empty} and ${not resource.code.coding[0].displayElement.empty}" th:text="${resource.code.coding[0].display}"/>
<th:block th:if="${not resource.code.coding.empty} and ${resource.code.textElement.empty} and ${resource.code.coding[0].displayElement.empty}" th:text="Untitled Diagnostic Report"/>
<th:block th:if="${not resource.code.coding.empty} and ${resource.code.textElement.empty} and ${resource.code.coding[0].displayElement.empty}" th:text="'Untitled Diagnostic Report'"/>
</th:block>
</th:block>
<!--/*--> Complete Blood Count <!--*/-->

View File

@@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.util;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.reflect.ClassPath;
import com.google.common.reflect.ClassPath.ClassInfo;
import org.apache.commons.io.IOUtils;
@@ -38,10 +39,7 @@ import java.io.InputStream;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.*;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;
import static com.google.common.base.Ascii.toUpperCase;
@@ -145,6 +143,13 @@ public class TestUtil {
private static void scan(AnnotatedElement theAnnotatedElement, Set<String> theNames, boolean theIsSuperClass) {
Table table = theAnnotatedElement.getAnnotation(Table.class);
if (table != null) {
// Banned name because we already used it once
ArrayList<String> bannedNames = Lists.newArrayList("CDR_USER_2FA", "TRM_VALUESET_CODE");
Validate.isTrue(!bannedNames.contains(table.name().toUpperCase()));
Validate.isTrue(table.name().toUpperCase().equals(table.name()));
assertNotADuplicateName(table.name(), theNames);
for (UniqueConstraint nextConstraint : table.uniqueConstraints()) {
assertNotADuplicateName(nextConstraint.name(), theNames);

View File

@@ -24,6 +24,7 @@ import org.slf4j.LoggerFactory;
import java.util.List;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.*;
@@ -344,6 +345,7 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
assertEquals(2, search.getResources(0, 2).size());
runInTransaction(() -> {
await().until(()->mySearchResultDao.count() == 2);
ourLog.info("Search results: {}", mySearchResultDao.findAll().toString());
assertEquals(mySearchResultDao.findAll().toString(), 2, mySearchResultDao.count());
});
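For context, here is a minimal Awaitility sketch (not part of this commit) of the polling idiom behind the added await() line: until() repeatedly evaluates the lambda until it returns true or the timeout elapses, then throws a ConditionTimeoutException. The explicit timeout and the mySearchResultDao reference are illustrative assumptions:

import static org.awaitility.Awaitility.await;
import java.util.concurrent.TimeUnit;

// Poll until the expected search results have been persisted, rather than asserting immediately.
// atMost(...) is optional; Awaitility defaults to a 10 second timeout.
await().atMost(10, TimeUnit.SECONDS).until(() -> mySearchResultDao.count() == 2);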

View File

@@ -79,9 +79,6 @@
<build>
<!-- Tells Maven to name the generated WAR file as hapi-fhir-jpaserver-example.war -->
<finalName>hapi-fhir-jpaserver-example</finalName>
<!-- The following is not required for the application to build, but allows you to test it by issuing "mvn jetty:run" from the command line. -->
<pluginManagement>
<plugins>

View File

@@ -41,10 +41,7 @@ import org.springframework.jdbc.core.ColumnMapRowMapper;
import javax.sql.DataSource;
import java.sql.*;
import java.util.HashSet;
import java.util.*;
import java.util.Locale;
import java.util.Objects;
import java.util.Set;
import static org.thymeleaf.util.StringUtils.toUpperCase;
@@ -56,25 +53,37 @@ public class JdbcUtils {
*/
public static Set<String> getIndexNames(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName) throws SQLException {
if (!getTableNames(theConnectionProperties).contains(theTableName)) {
return Collections.emptySet();
}
DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
try (Connection connection = dataSource.getConnection()) {
return theConnectionProperties.getTxTemplate().execute(t -> {
DatabaseMetaData metadata;
try {
metadata = connection.getMetaData();
ResultSet indexes = metadata.getIndexInfo(connection.getCatalog(), connection.getSchema(), massageIdentifier(metadata, theTableName), false, true);
ResultSet indexes = metadata.getIndexInfo(connection.getCatalog(), connection.getSchema(), massageIdentifier(metadata, theTableName), false, false);
Set<String> indexNames = new HashSet<>();
while (indexes.next()) {
ourLog.debug("*** Next index: {}", new ColumnMapRowMapper().mapRow(indexes, 0));
String indexName = indexes.getString("INDEX_NAME");
indexName = toUpperCase(indexName, Locale.US);
indexNames.add(indexName);
}
indexes = metadata.getIndexInfo(connection.getCatalog(), connection.getSchema(), massageIdentifier(metadata, theTableName), true, false);
while (indexes.next()) {
ourLog.debug("*** Next index: {}", new ColumnMapRowMapper().mapRow(indexes, 0));
String indexName = indexes.getString("INDEX_NAME");
indexName = toUpperCase(indexName, Locale.US);
indexNames.add(indexName);
}
indexNames.removeIf(i -> i == null);
return indexNames;
} catch (SQLException e) {
throw new InternalErrorException(e);
}
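A standalone sketch (not part of the commit) of the JDBC metadata call used above. DatabaseMetaData.getIndexInfo(catalog, schema, table, unique, approximate) returns only unique indexes when unique is true, and approximate=false requests exact rather than cached statistics; rows of type tableIndexStatistic carry a null INDEX_NAME, which is why null entries are stripped before the set is returned. The dataSource and table name here are illustrative assumptions:

import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;

static Set<String> readIndexNames(DataSource dataSource, String tableName) throws SQLException {
   try (Connection conn = dataSource.getConnection();
        ResultSet rs = conn.getMetaData().getIndexInfo(conn.getCatalog(), conn.getSchema(), tableName, false, false)) {
      Set<String> names = new HashSet<>();
      while (rs.next()) {
         names.add(rs.getString("INDEX_NAME")); // null for tableIndexStatistic rows
      }
      names.removeIf(Objects::isNull);
      return names;
   }
}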

View File

@@ -67,6 +67,8 @@ public class AddIndexTask extends BaseTableTask<AddIndexTask> {
return;
}
ourLog.info("Going to add a {} index named {} on table {} for columns {}", (myUnique ? "UNIQUE" : "NON-UNIQUE"), myIndexName, getTableName(), myColumns);
String unique = myUnique ? "unique " : "";
String columns = String.join(", ", myColumns);
String sql = "create " + unique + "index " + myIndexName + " on " + getTableName() + "(" + columns + ")";

View File

@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
*/
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -40,9 +41,16 @@ public class DropColumnTask extends BaseTableColumnTask<DropColumnTask> {
return;
}
String sql = "alter table " + getTableName() + " drop column " + getColumnName();
String tableName = getTableName();
String columnName = getColumnName();
String sql = createSql(tableName, columnName);
ourLog.info("Dropping column {} on table {}", getColumnName(), getTableName());
executeSql(getTableName(), sql);
}
@Language("SQL")
static String createSql(String theTableName, String theColumnName) {
return "alter table " + theTableName + " drop column " + theColumnName;
}
}

View File

@@ -20,11 +20,13 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
* #L%
*/
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import java.sql.SQLException;
import java.util.Set;
@@ -56,45 +58,56 @@ public class DropIndexTask extends BaseTableTask<DropIndexTask> {
String uniquenessString = isUnique ? "unique" : "non-unique";
ourLog.info("Dropping {} index {} on table {}", uniquenessString, myIndexName, getTableName());
String sql = createDropIndexSql(getConnectionProperties(), getTableName(), myIndexName, getDriverType());
executeSql(getTableName(), sql);
}
@Nonnull
static String createDropIndexSql(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theIndexName, DriverTypeEnum theDriverType) throws SQLException {
Validate.notBlank(theIndexName, "theIndexName must not be blank");
Validate.notBlank(theTableName, "theTableName must not be blank");
boolean isUnique = JdbcUtils.isIndexUnique(theConnectionProperties, theTableName, theIndexName);
String sql = null;
if (isUnique) {
// Drop constraint
switch (getDriverType()) {
switch (theDriverType) {
case MYSQL_5_7:
case MARIADB_10_1:
sql = "alter table " + getTableName() + " drop index " + myIndexName;
sql = "alter table " + theTableName + " drop index " + theIndexName;
break;
case H2_EMBEDDED:
case DERBY_EMBEDDED:
sql = "drop index " + myIndexName;
sql = "drop index " + theIndexName;
break;
case POSTGRES_9_4:
case ORACLE_12C:
case MSSQL_2012:
sql = "alter table " + getTableName() + " drop constraint " + myIndexName;
sql = "alter table " + theTableName + " drop constraint " + theIndexName;
break;
}
} else {
// Drop index
switch (getDriverType()) {
switch (theDriverType) {
case MYSQL_5_7:
case MARIADB_10_1:
sql = "alter table " + getTableName() + " drop index " + myIndexName;
sql = "alter table " + theTableName + " drop index " + theIndexName;
break;
case POSTGRES_9_4:
case DERBY_EMBEDDED:
case H2_EMBEDDED:
case ORACLE_12C:
sql = "drop index " + myIndexName;
sql = "drop index " + theIndexName;
break;
case MSSQL_2012:
sql = "drop index " + getTableName() + "." + myIndexName;
sql = "drop index " + theTableName + "." + theIndexName;
break;
}
}
executeSql(getTableName(), sql);
return sql;
}

View File

@@ -0,0 +1,57 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
/*-
* #%L
* HAPI FHIR JPA Server - Migration
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.SQLException;
import java.util.Set;
public class DropTableTask extends BaseTableTask<DropTableTask> {
private static final Logger ourLog = LoggerFactory.getLogger(DropTableTask.class);
@Override
public void execute() throws SQLException {
Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
if (!tableNames.contains(getTableName())) {
return;
}
Set<String> indexNames = JdbcUtils.getIndexNames(getConnectionProperties(), getTableName());
for (String nextIndex : indexNames) {
String sql = DropIndexTask.createDropIndexSql(getConnectionProperties(), getTableName(), nextIndex, getDriverType());
ourLog.info("Dropping index {} on table {} in preparation for table delete", nextIndex, getTableName());
executeSql(getTableName(), sql);
}
ourLog.info("Dropping table: {}", getTableName());
String sql = "DROP TABLE " + getTableName();
executeSql(getTableName(), sql);
}
}
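Later in this commit the new task is exposed through the migration DSL as BuilderWithTableName.dropThisTable(); a minimal sketch of how a migration definition invokes it, mirroring the HapiFhirJpaMigrationTasks change below:

Builder version = forVersion(VersionEnum.V4_0_0);
// Drops the interim table, after first dropping any indexes it still carries
version.onTable("TRM_VALUESET_CODE").dropThisTable();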

View File

@@ -24,6 +24,8 @@ import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.ColumnMapRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import java.sql.SQLException;
import java.util.Set;
@@ -34,6 +36,11 @@ public class RenameColumnTask extends BaseTableTask<RenameColumnTask> {
private String myOldName;
private String myNewName;
private boolean myAllowNeitherColumnToExist;
private boolean myDeleteTargetColumnFirstIfBothExist;
public void setDeleteTargetColumnFirstIfBothExist(boolean theDeleteTargetColumnFirstIfBothExist) {
myDeleteTargetColumnFirstIfBothExist = theDeleteTargetColumnFirstIfBothExist;
}
public void setOldName(String theOldName) {
Validate.notBlank(theOldName);
@@ -51,15 +58,30 @@ public class RenameColumnTask extends BaseTableTask<RenameColumnTask> {
boolean haveOldName = columnNames.contains(myOldName.toUpperCase());
boolean haveNewName = columnNames.contains(myNewName.toUpperCase());
if (haveOldName && haveNewName) {
throw new SQLException("Can not rename " + getTableName() + "." + myOldName + " to " + myNewName + " because both columns exist!");
}
if (!haveOldName && !haveNewName) {
if (myDeleteTargetColumnFirstIfBothExist) {
Integer rowsWithData = getConnectionProperties().getTxTemplate().execute(t -> {
String sql = "SELECT * FROM " + getTableName() + " WHERE " + myNewName + " IS NOT NULL";
JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
jdbcTemplate.setMaxRows(1);
return jdbcTemplate.query(sql, new ColumnMapRowMapper()).size();
});
if (rowsWithData > 0) {
throw new SQLException("Can not rename " + getTableName() + "." + myOldName + " to " + myNewName + " because both columns exist and data exists in " + myNewName);
}
ourLog.info("Table {} has columns {} and {} - Going to drop {} before renaming", getTableName(), myOldName, myNewName, myNewName);
String sql = DropColumnTask.createSql(getTableName(), myNewName);
executeSql(getTableName(), sql);
} else {
throw new SQLException("Can not rename " + getTableName() + "." + myOldName + " to " + myNewName + " because both columns exist!");
}
} else if (!haveOldName && !haveNewName) {
if (isAllowNeitherColumnToExist()) {
return;
}
throw new SQLException("Can not rename " + getTableName() + "." + myOldName + " to " + myNewName + " because neither column exists!");
}
if (haveNewName) {
} else if (haveNewName) {
ourLog.info("Column {} already exists on table {} - No action performed", myNewName, getTableName());
return;
}
@@ -94,11 +116,11 @@ public class RenameColumnTask extends BaseTableTask<RenameColumnTask> {
}
public void setAllowNeitherColumnToExist(boolean theAllowNeitherColumnToExist) {
myAllowNeitherColumnToExist = theAllowNeitherColumnToExist;
}
public boolean isAllowNeitherColumnToExist() {
return myAllowNeitherColumnToExist;
}
public void setAllowNeitherColumnToExist(boolean theAllowNeitherColumnToExist) {
myAllowNeitherColumnToExist = theAllowNeitherColumnToExist;
}
}

View File

@@ -63,32 +63,31 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
protected void init400() {
Builder version = forVersion(VersionEnum.V4_0_0);
// Interim builds used this name
version.onTable("TRM_VALUESET_CODE").dropThisTable();
version.onTable("TRM_CONCEPT_MAP_GROUP")
.renameColumn("myConceptMapUrl", "CONCEPT_MAP_URL")
.renameColumn("myConceptMapUrl", "CONCEPT_MAP_URL", false, true)
.renameColumn("mySourceValueSet", "SOURCE_VS")
.renameColumn("mySourceValueSet", "SOURCE_VS", false, true)
.renameColumn("myTargetValueSet", "TARGET_VS");
.renameColumn("myTargetValueSet", "TARGET_VS", false, true);
version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT")
.renameColumn("myConceptMapUrl", "CONCEPT_MAP_URL")
.renameColumn("myConceptMapUrl", "CONCEPT_MAP_URL", false, true)
.renameColumn("mySystem", "SYSTEM_URL")
.renameColumn("mySystem", "SYSTEM_URL", false, true)
.renameColumn("mySystemVersion", "SYSTEM_VERSION")
.renameColumn("mySystemVersion", "SYSTEM_VERSION", false, true)
.renameColumn("myValueSet", "VALUESET_URL");
.renameColumn("myValueSet", "VALUESET_URL", false, true);
version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
.renameColumn("myConceptMapUrl", "CONCEPT_MAP_URL")
.renameColumn("myConceptMapUrl", "CONCEPT_MAP_URL", false, true)
.renameColumn("mySystem", "SYSTEM_URL")
.renameColumn("mySystem", "SYSTEM_URL", false, true)
.renameColumn("mySystemVersion", "SYSTEM_VERSION")
.renameColumn("mySystemVersion", "SYSTEM_VERSION", false, true)
.renameColumn("myValueSet", "VALUESET_URL");
.renameColumn("myValueSet", "VALUESET_URL", false, true);
version.onTable("TRM_VALUESET")
.renameColumn("NAME", "VSNAME", true);
.renameColumn("NAME", "VSNAME", true, true);
version.onTable("TRM_VALUESET_CONCEPT")
.renameColumn("CODE", "CODEVAL", true)
.renameColumn("SYSTEM", "SYSTEM_URL", true);
version.onTable("TRM_CONCEPT")
.renameColumn("CODE", "CODEVAL");
.renameColumn("CODE", "CODEVAL", false, true);
// TermValueSet
version.startSectionWithMessage("Processing table: TRM_VALUESET");
@@ -117,13 +116,16 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
.addForeignKey("FK_TRM_VALUESET_PID")
.toColumn("VALUESET_PID")
.references("TRM_VALUESET", "PID");
termValueSetConceptTable.addColumn("SYSTEM").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, TermCodeSystem.MAX_URL_LENGTH);
termValueSetConceptTable.addColumn("SYSTEM_URL").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, TermCodeSystem.MAX_URL_LENGTH);
termValueSetConceptTable.addColumn("CODE").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, TermConcept.MAX_CODE_LENGTH);
termValueSetConceptTable.addColumn("CODEVAL").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, TermConcept.MAX_CODE_LENGTH);
termValueSetConceptTable
.addIndex("IDX_VALUESET_CONCEPT_CS_CD")
.unique(false)
.withColumns("SYSTEM", "CODE");
.withColumns("SYSTEM_URL", "CODEVAL");
termValueSetConceptTable.addColumn("DISPLAY").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, TermConcept.MAX_DESC_LENGTH);
version.onTable("TRM_VALUESET_CONCEPT")
.renameColumn("CODE", "CODEVAL", true, true)
.renameColumn("SYSTEM", "SYSTEM_URL", true, true);
// TermValueSetConceptDesignation
version.startSectionWithMessage("Processing table: TRM_VALUESET_C_DESIGNATION");

View File

@@ -117,6 +117,53 @@ public class BaseMigrationTasks<T extends Enum> {
addTask(task);
}
public class BuilderAddTableRawSql {
private final AddTableRawSqlTask myTask;
protected BuilderAddTableRawSql(String theTableName) {
myTask = new AddTableRawSqlTask();
myTask.setTableName(theTableName);
addTask(myTask);
}
public BuilderAddTableRawSql addSql(DriverTypeEnum theDriverTypeEnum, @Language("SQL") String theSql) {
myTask.addSql(theDriverTypeEnum, theSql);
return this;
}
public void addSql(@Language("SQL") String theSql) {
myTask.addSql(theSql);
}
}
public class BuilderAddTableByColumns extends BuilderWithTableName implements IAcceptsTasks {
private final AddTableByColumnTask myTask;
public BuilderAddTableByColumns(IAcceptsTasks theSink, String theTableName, String thePkColumnName) {
super(theSink, theTableName);
myTask = new AddTableByColumnTask();
myTask.setTableName(theTableName);
myTask.setPkColumn(thePkColumnName);
theSink.addTask(myTask);
}
@Override
public BuilderWithTableName.BuilderAddColumnWithName addColumn(String theColumnName) {
return new BuilderWithTableName.BuilderAddColumnWithName(theColumnName, this);
}
@Override
public void addTask(BaseTask<?> theTask) {
if (theTask instanceof AddColumnTask) {
myTask.addAddColumnTask((AddColumnTask) theTask);
} else {
super.addTask(theTask);
}
}
}
public static class BuilderWithTableName implements IAcceptsTasks {
private final String myTableName;
private final IAcceptsTasks mySink;
@@ -137,6 +184,12 @@ public class BaseMigrationTasks<T extends Enum> {
addTask(task);
}
public void dropThisTable() {
DropTableTask task = new DropTableTask();
task.setTableName(myTableName);
addTask(task);
}
public BuilderAddIndexWithName addIndex(String theIndexName) {
return new BuilderAddIndexWithName(theIndexName);
}
@@ -155,7 +208,7 @@
@Override
public void addTask(BaseTask<?> theTask) {
((BaseTableTask<?>)theTask).setTableName(myTableName);
((BaseTableTask<?>) theTask).setTableName(myTableName);
mySink.addTask(theTask);
}
@@ -168,15 +221,22 @@
}
public BuilderWithTableName renameColumn(String theOldName, String theNewName) {
return renameColumn(theOldName, theNewName, false);
return renameColumn(theOldName, theNewName, false, false);
}
public BuilderWithTableName renameColumn(String theOldName, String theNewName, boolean theAllowNeitherColumnToExist) {
/**
* @param theOldName The old column name
* @param theNewName The new column name
* @param theAllowNeitherColumnToExist Setting this to true means that it's not an error if neither column exists
* @param theDeleteTargetColumnFirstIfBothEixst Setting this to true causes the migrator to be ok with the target column existing. It will make sure that there is no data in the column with the new name, then delete it if so in order to make room for the renamed column. If there is data it will still bomb out.
*/
public BuilderWithTableName renameColumn(String theOldName, String theNewName, boolean theAllowNeitherColumnToExist, boolean theDeleteTargetColumnFirstIfBothEixst) {
RenameColumnTask task = new RenameColumnTask();
task.setTableName(myTableName);
task.setOldName(theOldName);
task.setNewName(theNewName);
task.setAllowNeitherColumnToExist(theAllowNeitherColumnToExist);
task.setDeleteTargetColumnFirstIfBothExist(theDeleteTargetColumnFirstIfBothEixst);
addTask(task);
return this;
}
@@ -210,47 +270,6 @@
}
}
public static class BuilderAddColumnWithName {
private final String myColumnName;
private final IAcceptsTasks myTaskSink;
public BuilderAddColumnWithName(String theColumnName, IAcceptsTasks theTaskSink) {
myColumnName = theColumnName;
myTaskSink = theTaskSink;
}
public BuilderAddColumnWithNameNullable nullable() {
return new BuilderAddColumnWithNameNullable(true);
}
public BuilderAddColumnWithNameNullable nonNullable() {
return new BuilderAddColumnWithNameNullable(false);
}
public class BuilderAddColumnWithNameNullable {
private final boolean myNullable;
public BuilderAddColumnWithNameNullable(boolean theNullable) {
myNullable = theNullable;
}
public void type(AddColumnTask.ColumnTypeEnum theColumnType) {
type(theColumnType, null);
}
public void type(AddColumnTask.ColumnTypeEnum theColumnType, Integer theLength) {
AddColumnTask task = new AddColumnTask();
task.setColumnName(myColumnName);
task.setNullable(myNullable);
task.setColumnType(theColumnType);
if (theLength != null) {
task.setColumnLength(theLength);
}
myTaskSink.addTask(task);
}
}
}
public class BuilderModifyColumnWithName {
private final String myColumnName;
@@ -334,51 +353,45 @@
}
}
}
}
public class BuilderAddTableRawSql {
private final AddTableRawSqlTask myTask;
protected BuilderAddTableRawSql(String theTableName) {
myTask = new AddTableRawSqlTask();
myTask.setTableName(theTableName);
addTask(myTask);
}
public BuilderAddTableRawSql addSql(DriverTypeEnum theDriverTypeEnum, @Language("SQL") String theSql) {
myTask.addSql(theDriverTypeEnum, theSql);
return this;
}
public void addSql(@Language("SQL") String theSql) {
myTask.addSql(theSql);
}
}
public class BuilderAddTableByColumns extends BuilderWithTableName implements IAcceptsTasks {
private final AddTableByColumnTask myTask;
public BuilderAddTableByColumns(IAcceptsTasks theSink, String theTableName, String thePkColumnName) {
super(theSink, theTableName);
myTask = new AddTableByColumnTask();
myTask.setTableName(theTableName);
myTask.setPkColumn(thePkColumnName);
theSink.addTask(myTask);
}
@Override
public BuilderWithTableName.BuilderAddColumnWithName addColumn(String theColumnName) {
return new BuilderWithTableName.BuilderAddColumnWithName(theColumnName, this);
}
@Override
public void addTask(BaseTask<?> theTask) {
if (theTask instanceof AddColumnTask) {
myTask.addAddColumnTask((AddColumnTask) theTask);
} else {
super.addTask(theTask);
}
}
}
public static class BuilderAddColumnWithName {
private final String myColumnName;
private final IAcceptsTasks myTaskSink;
public BuilderAddColumnWithName(String theColumnName, IAcceptsTasks theTaskSink) {
myColumnName = theColumnName;
myTaskSink = theTaskSink;
}
public BuilderAddColumnWithNameNullable nullable() {
return new BuilderAddColumnWithNameNullable(true);
}
public BuilderAddColumnWithNameNullable nonNullable() {
return new BuilderAddColumnWithNameNullable(false);
}
public class BuilderAddColumnWithNameNullable {
private final boolean myNullable;
public BuilderAddColumnWithNameNullable(boolean theNullable) {
myNullable = theNullable;
}
public void type(AddColumnTask.ColumnTypeEnum theColumnType) {
type(theColumnType, null);
}
public void type(AddColumnTask.ColumnTypeEnum theColumnType, Integer theLength) {
AddColumnTask task = new AddColumnTask();
task.setColumnName(myColumnName);
task.setNullable(myNullable);
task.setColumnType(theColumnType);
if (theLength != null) {
task.setColumnLength(theLength);
}
myTaskSink.addTask(task);
}
}
}
}
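A brief illustration (drawn from the HapiFhirJpaMigrationTasks changes in this commit) of the four-argument renameColumn builder documented above; the final flag asks the migrator to drop an empty target column before renaming, while the migration still fails if that column already contains data:

version.onTable("TRM_CONCEPT")
   .renameColumn("CODE", "CODEVAL", false, true);
version.onTable("TRM_VALUESET")
   .renameColumn("NAME", "VSNAME", true, true); // also tolerates neither column existing, e.g. on a freshly created schema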

View File

@@ -6,14 +6,34 @@ import org.junit.Test;
import java.sql.SQLException;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertThat;
public class AddIndexTest extends BaseTest {
@Test
public void testIndexAlreadyExists() throws SQLException {
public void testUniqueConstraintAlreadyExists() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
executeSql("ALTER TABLE SOMETABLE ADD CONSTRAINT IDX_ANINDEX UNIQUE(TEXTCOL)");
AddIndexTask task = new AddIndexTask();
task.setIndexName("IDX_ANINDEX");
task.setTableName("SOMETABLE");
task.setColumns("TEXTCOL");
task.setUnique(true);
getMigrator().addTask(task);
getMigrator().migrate();
getMigrator().migrate();
getMigrator().migrate();
getMigrator().migrate();
assertThat(JdbcUtils.getIndexNames(getConnectionProperties(), "SOMETABLE"), hasItem("IDX_ANINDEX"));
}
@Test
public void testUniqueIndexAlreadyExists() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))"); executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
executeSql("create unique index IDX_ANINDEX on SOMETABLE (PID, TEXTCOL)"); executeSql("create unique index IDX_ANINDEX on SOMETABLE (PID, TEXTCOL)");
executeSql("create unique index IDX_DIFINDEX on SOMETABLE (TEXTCOL)"); executeSql("create unique index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");
@ -25,6 +45,30 @@ public class AddIndexTest extends BaseTest {
task.setUnique(false); task.setUnique(false);
getMigrator().addTask(task); getMigrator().addTask(task);
getMigrator().migrate();
getMigrator().migrate();
getMigrator().migrate();
getMigrator().migrate();
assertThat(JdbcUtils.getIndexNames(getConnectionProperties(), "SOMETABLE"), containsInAnyOrder("IDX_DIFINDEX", "IDX_ANINDEX"));
}
@Test
public void testNonUniqueIndexAlreadyExists() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
executeSql("create index IDX_ANINDEX on SOMETABLE (PID, TEXTCOL)");
executeSql("create index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");
AddIndexTask task = new AddIndexTask();
task.setIndexName("IDX_ANINDEX");
task.setTableName("SOMETABLE");
task.setColumns("PID", "TEXTCOL");
task.setUnique(false);
getMigrator().addTask(task);
getMigrator().migrate();
getMigrator().migrate();
getMigrator().migrate();
getMigrator().migrate();
assertThat(JdbcUtils.getIndexNames(getConnectionProperties(), "SOMETABLE"), containsInAnyOrder("IDX_DIFINDEX", "IDX_ANINDEX"));

View File

@@ -0,0 +1,45 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.junit.Test;
import java.sql.SQLException;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.core.IsNot.not;
import static org.junit.Assert.assertThat;
public class DropTableTest extends BaseTest {
@Test
public void testDropExistingTable() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
executeSql("create index IDX_ANINDEX on SOMETABLE (PID, TEXTCOL)");
executeSql("create index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");
DropTableTask task = new DropTableTask();
task.setTableName("SOMETABLE");
getMigrator().addTask(task);
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), (hasItems("SOMETABLE")));
getMigrator().migrate();
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), not(hasItems("SOMETABLE")));
}
@Test
public void testDropNonExistingTable() throws SQLException {
DropTableTask task = new DropTableTask();
task.setTableName("SOMETABLE");
getMigrator().addTask(task);
getMigrator().migrate();
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), not(hasItems("SOMETABLE")));
}
}

View File

@@ -5,6 +5,7 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.junit.Test;
import java.sql.SQLException;
import java.util.Set;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
@@ -28,6 +29,46 @@ public class RenameColumnTaskTest extends BaseTest {
assertThat(JdbcUtils.getColumnNames(getConnectionProperties(), "SOMETABLE"), containsInAnyOrder("PID", "TEXTCOL"));
}
@Test
public void testBothExistDeleteTargetFirst() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255), myTextCol varchar(255))");
RenameColumnTask task = new RenameColumnTask();
task.setTableName("SOMETABLE");
task.setDescription("Drop an index");
task.setOldName("myTextCol");
task.setNewName("TEXTCOL");
task.setDeleteTargetColumnFirstIfBothExist(true);
getMigrator().addTask(task);
getMigrator().migrate();
Set<String> columnNames = JdbcUtils.getColumnNames(getConnectionProperties(), "SOMETABLE");
assertThat(columnNames.toString(), columnNames, containsInAnyOrder("PID", "TEXTCOL"));
}
@Test
public void testBothExistDeleteTargetFirstDataExistsInSourceAndTarget() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255), myTextCol varchar(255))");
executeSql("INSERT INTO SOMETABLE (PID, TEXTCOL, myTextCol) VALUES (123, 'AAA', 'BBB')");
RenameColumnTask task = new RenameColumnTask();
task.setTableName("SOMETABLE");
task.setDescription("Drop an index");
task.setOldName("myTextCol");
task.setNewName("TEXTCOL");
task.setDeleteTargetColumnFirstIfBothExist(true);
getMigrator().addTask(task);
try {
getMigrator().migrate();
fail();
} catch (InternalErrorException e) {
assertEquals("Failure executing task \"Drop an index\", aborting! Cause: java.sql.SQLException: Can not rename SOMETABLE.myTextCol to TEXTCOL because both columns exist and data exists in TEXTCOL", e.getMessage());
}
}
@Test
public void testColumnDoesntAlreadyExist() throws SQLException {
executeSql("create table SOMETABLE (PID bigint not null, myTextCol varchar(255))");

View File

@@ -11,6 +11,7 @@ import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.spring.boot.autoconfigure.FhirAutoConfiguration.FhirJpaServerConfiguration.Dstu3;
import org.assertj.core.util.Arrays;
import org.junit.After;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
@@ -101,18 +102,21 @@ public class FhirAutoConfigurationTest {
}
@Test
@Ignore
public void withValidation() {
load();
assertThat(this.context.getBeansOfType(IServerInterceptor.class)).hasSize(1);
}
@Test
@Ignore
public void withValidations() {
load("hapi.fhir.validation.request-only:false");
assertThat(this.context.getBeansOfType(IServerInterceptor.class)).hasSize(2);
}
@Test
@Ignore
public void withCustomValidationSchemaLocation() {
load("hapi.fhir.validation.schema-location:custom-schema-location");
assertThat(this.context.getBeansOfType(IServerInterceptor.class)).hasSize(1);

View File

@@ -327,6 +327,11 @@
resource as the resource type in order to hold a JSONPatch or XMLPatch body)
has been added to the JPA server.
</action>
<action type="fix" issue="1390">
Two issues in the Thymeleaf Narrative Template which caused an error when generating
a narrative on an untitled DiagnosticReport were fixed. Thanks to GitHub
user @navyflower for reporting!
</action>
</release>
<release version="3.8.0" date="2019-05-30" description="Hippo">
<action type="fix">

View File

@@ -222,7 +222,7 @@
<td style="background: #CEC;">4.0.0</td>
</tr>
<tr>
<td>HAPI FHIR 3.8.0-SNAPSHOT</td>
<td>HAPI FHIR 3.8.0</td>
<td>JDK8</td>
<td style="background: #DDD;"></td>
<td style="background: #CEC;">1.0.2</td>