Introduce TRM_CONCEPT_DESIG.VAL_VC column to replace VAL (#6080)

* Extend TRM_CONCEPT_DESIG.VAL column from 2,000 to 4,000 characters.

* Increase to 8,000 chars.  Add jira to changelog.

* Implement new design.   Still needs new/updated tests.

* Fix setter.

* Make both columns nullable.  Fix changelog.  Add better error message for HapiMigrator.

* Enhance test.

* Small code review suggestion.

* Cosmetic change to re-trigger build.
Luke deGruchy 2024-07-08 14:19:33 -04:00 committed by GitHub
parent aafa445d19
commit ef0aa11e00
9 changed files with 154 additions and 20 deletions


@@ -0,0 +1,5 @@
+---
+type: add
+issue: 6079
+jira: SMILE-8644
+title: "It is now possible to save TermConceptDesignations with values over 2,000 characters."
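For illustration, a minimal usage sketch of what this enables, mirroring the new regression test further down (myTermConceptDesignationDao is the Spring-injected JPA repository that test uses; this is a sketch, not part of the commit):

	// Sketch only: persist a designation whose value exceeds the old 2,000-character limit.
	// The value lands in the new TRM_CONCEPT_DESIG.VAL_VC column; the legacy VAL column stays null.
	String longValue = "B".repeat(8_000);

	TermConceptDesignation designation = new TermConceptDesignation().setValue(longValue);
	myTermConceptDesignationDao.save(designation);

	// getValue() prefers VAL_VC when it is populated, so the full value round-trips unchanged.
	assertThat(myTermConceptDesignationDao.findAll().get(0).getValue()).isEqualTo(longValue);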


@@ -35,8 +35,10 @@ import jakarta.persistence.SequenceGenerator;
 import jakarta.persistence.Table;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
+import org.hibernate.Length;
 
 import java.io.Serializable;
+import java.util.Objects;
 
 import static org.apache.commons.lang3.StringUtils.left;
 import static org.apache.commons.lang3.StringUtils.length;
@@ -82,8 +84,11 @@ public class TermConceptDesignation implements Serializable {
 	@Column(name = "USE_DISPLAY", nullable = true, length = MAX_LENGTH)
 	private String myUseDisplay;
 
-	@Column(name = "VAL", nullable = false, length = MAX_VAL_LENGTH)
+	@Column(name = "VAL", nullable = true, length = MAX_VAL_LENGTH)
 	private String myValue;
 
+	@Column(name = "VAL_VC", nullable = true, length = Length.LONG32)
+	private String myValueVc;
+
 	/**
 	 * TODO: Make this non-null
 	 *
@@ -144,14 +149,11 @@ public class TermConceptDesignation implements Serializable {
 	}
 
 	public String getValue() {
-		return myValue;
+		return Objects.nonNull(myValueVc) ? myValueVc : myValue;
 	}
 
-	public TermConceptDesignation setValue(@Nonnull String theValue) {
-		ValidateUtil.isNotBlankOrThrowIllegalArgument(theValue, "theValue must not be null or empty");
-		ValidateUtil.isNotTooLongOrThrowIllegalArgument(
-			theValue, MAX_VAL_LENGTH, "Value exceeds maximum length (" + MAX_VAL_LENGTH + "): " + length(theValue));
-		myValue = theValue;
+	public TermConceptDesignation setValue(@Nonnull String theValueVc) {
+		myValueVc = theValueVc;
 		return this;
 	}
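To make the read/write contract above easier to follow, here is a standalone sketch of the same dual-column pattern (illustrative names, not the real entity): new values are written only to the unbounded VAL_VC column, while rows persisted before the migration still resolve through the legacy VAL column.

	import java.util.Objects;

	// Illustration only; mirrors the getValue()/setValue() logic in TermConceptDesignation above.
	class DualColumnValueSketch {
		private String myValue;   // legacy VAL column: VARCHAR(2000), now nullable, no longer written
		private String myValueVc; // new VAL_VC column: unbounded text (Length.LONG32)

		String getValue() {
			// Prefer the new column; fall back to VAL for rows written before the migration.
			return Objects.nonNull(myValueVc) ? myValueVc : myValue;
		}

		DualColumnValueSketch setValue(String theValueVc) {
			myValueVc = theValueVc;
			return this;
		}
	}

Note that the old blank/length validation in setValue() is removed, since VAL_VC no longer has a practical length cap.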


@@ -440,6 +440,18 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 							t.getString("IDX_STRING")))
 					.setColumnName("HASH_COMPLETE"));
 		}
+
+		{
+			version.onTable("TRM_CONCEPT_DESIG")
+					.modifyColumn("20240705.10", "VAL")
+					.nullable()
+					.withType(ColumnTypeEnum.STRING, 2000);
+
+			version.onTable("TRM_CONCEPT_DESIG")
+					.addColumn("20240705.20", "VAL_VC")
+					.nullable()
+					.type(ColumnTypeEnum.TEXT);
+		}
 	}
 
 	protected void init720() {


@@ -6,6 +6,7 @@ import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
 import ca.uhn.fhir.jpa.entity.TermConcept;
+import ca.uhn.fhir.jpa.entity.TermConceptDesignation;
 import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
 import ca.uhn.fhir.jpa.model.dao.JpaPid;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@@ -48,8 +49,10 @@ import java.util.Date;
 import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;
+import java.util.stream.IntStream;
 
 import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
 import static org.junit.jupiter.api.Assertions.fail;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -1374,6 +1377,27 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
 	}
 
+	@Test
+	void termConceptDesignationOver2000CharVal() {
+		final String stringWith8000Chars = IntStream.range(0, 8000)
+				.mapToObj(anInt -> "B")
+				.collect(Collectors.joining());
+
+		final TermConceptDesignation termConceptDesignation8000Chars = new TermConceptDesignation()
+				.setValue(stringWith8000Chars);
+
+		myTermConceptDesignationDao.save(termConceptDesignation8000Chars);
+
+		final List<TermConceptDesignation> allTermConceptDesignations = myTermConceptDesignationDao.findAll();
+		assertThat(allTermConceptDesignations)
+				.hasSize(1);
+
+		final TermConceptDesignation termConceptDesignationFromDb = allTermConceptDesignations.get(0);
+		assertThat(termConceptDesignationFromDb.getValue())
+				.isEqualTo(stringWith8000Chars);
+	}
+
 	private ArrayList<String> toCodesContains(List<ValueSetExpansionContainsComponent> theContains) {
 		ArrayList<String> retVal = new ArrayList<>();
 		for (ValueSetExpansionContainsComponent next : theContains) {


@@ -36,6 +36,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Properties;
+import java.util.Set;
 
 import static ca.uhn.fhir.jpa.embedded.HapiEmbeddedDatabasesExtension.FIRST_TESTED_VERSION;
 import static ca.uhn.fhir.jpa.migrate.SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME;
@@ -58,10 +59,14 @@ public class HapiSchemaMigrationTest {
 	private static final String METADATA_IS_NULLABLE_YES = "YES";
 
 	private static final String TABLE_HFJ_RES_SEARCH_URL = "HFJ_RES_SEARCH_URL";
+	private static final String TABLE_TRM_CONCEPT_DESIG = "TRM_CONCEPT_DESIG";
 	private static final String COLUMN_RES_SEARCH_URL = "RES_SEARCH_URL";
 	private static final String COLUMN_PARTITION_ID = "PARTITION_ID";
 	private static final String COLUMN_PARTITION_DATE = "PARTITION_DATE";
+	private static final String COLUMN_VAL = "VAL";
+	private static final String COLUMN_VAL_VC = "VAL_VC";
 
 	private static final String NULL_PLACEHOLDER = "[NULL]";
 
 	static {
@@ -120,6 +125,8 @@ public class HapiSchemaMigrationTest {
 		verifyForcedIdMigration(dataSource);
 
 		verifyHfjResSearchUrlMigration(database, theDriverType);
+
+		verifyTrm_Concept_Desig(database, theDriverType);
 	}
 
 	/**
@@ -176,8 +183,6 @@
 			}
 		}
 
-		ourLog.info("6145: actualColumnResults: {}", actualColumnResults);
-
 		final List<Map<String,String>> actualPrimaryKeyResults = new ArrayList<>();
 
 		try (final ResultSet primaryKeyResultSet = tableMetaData.getPrimaryKeys(null, null, TABLE_HFJ_RES_SEARCH_URL)) {
@@ -207,6 +212,61 @@
 		}
 	}
 
+	private void verifyTrm_Concept_Desig(JpaEmbeddedDatabase theDatabase, DriverTypeEnum theDriverType) throws SQLException {
+		final List<Map<String, Object>> allCount = theDatabase.query(String.format("SELECT count(*) FROM %s", TABLE_TRM_CONCEPT_DESIG));
+		final List<Map<String, Object>> nonNullValCount = theDatabase.query(String.format("SELECT count(*) FROM %s WHERE %s IS NOT NULL", TABLE_TRM_CONCEPT_DESIG, COLUMN_VAL));
+		final List<Map<String, Object>> nullValVcCount = theDatabase.query(String.format("SELECT count(*) FROM %s WHERE %s IS NULL", TABLE_TRM_CONCEPT_DESIG, COLUMN_VAL_VC));
+
+		assertThat(nonNullValCount).hasSize(1);
+		final Collection<Object> queryResultValuesVal = nonNullValCount.get(0).values();
+		assertThat(queryResultValuesVal).hasSize(1);
+		final Object queryResultValueVal = queryResultValuesVal.iterator().next();
+		assertThat(queryResultValueVal).isInstanceOf(Number.class);
+		if (queryResultValueVal instanceof Number queryResultNumber) {
+			assertThat(queryResultNumber.intValue()).isEqualTo(1);
+		}
+
+		assertThat(nullValVcCount).hasSize(1);
+		final Collection<Object> queryResultValuesValVc = nullValVcCount.get(0).values();
+		assertThat(queryResultValuesValVc).hasSize(1);
+		final Object queryResultValueValVc = queryResultValuesValVc.iterator().next();
+		assertThat(queryResultValueValVc).isInstanceOf(Number.class);
+		if (queryResultValueValVc instanceof Number queryResultNumber) {
+			assertThat(queryResultNumber.intValue()).isEqualTo(1);
+		}
+
+		final Object allCountValue = allCount.get(0).values().iterator().next();
+		if (allCountValue instanceof Number allCountNumber) {
+			assertThat(allCountNumber.intValue()).isEqualTo(1);
+		}
+
+		try (final Connection connection = theDatabase.getDataSource().getConnection()) {
+			final DatabaseMetaData tableMetaData = connection.getMetaData();
+
+			final List<Map<String, String>> actualColumnResults = new ArrayList<>();
+
+			try (final ResultSet columnsResultSet = tableMetaData.getColumns(null, null, getTableNameWithDbSpecificCase(theDriverType, TABLE_TRM_CONCEPT_DESIG), null)) {
+				while (columnsResultSet.next()) {
+					final Map<String, String> columnMap = new HashMap<>();
+					actualColumnResults.add(columnMap);
+
+					extractAndAddToMap(columnsResultSet, columnMap, METADATA_COLUMN_NAME);
+					extractAndAddToMap(columnsResultSet, columnMap, METADATA_DATA_TYPE);
+					extractAndAddToMap(columnsResultSet, columnMap, METADATA_IS_NULLABLE);
+					extractAndAddToMap(columnsResultSet, columnMap, METADATA_DEFAULT_VALUE);
+				}
+
+				assertThat(actualColumnResults).contains(addExpectedColumnMetadata(COLUMN_VAL, Integer.toString(Types.VARCHAR), METADATA_IS_NULLABLE_YES, null));
+				assertThat(actualColumnResults).contains(addExpectedColumnMetadata(COLUMN_VAL_VC, getExpectedSqlTypeForValVc(theDriverType), METADATA_IS_NULLABLE_YES, null));
+			}
+		}
+	}
+
+	private String getTableNameWithDbSpecificCase(DriverTypeEnum theDriverType, String theTableName) {
+		return Set.of(DriverTypeEnum.ORACLE_12C, DriverTypeEnum.H2_EMBEDDED).contains(theDriverType)
+				? theTableName
+				: theTableName.toLowerCase();
+	}
+
 	@Nonnull
 	private Map<String, String> addExpectedColumnMetadata(String theColumnName, String theDataType, String theNullable, @Nullable String theDefaultValue) {
 		return Map.of(METADATA_COLUMN_NAME, theColumnName,
@@ -234,6 +294,12 @@
 				: Integer.toString(Types.DATE);
 	}
 
+	private String getExpectedSqlTypeForValVc(DriverTypeEnum theDriverType) {
+		return Set.of(DriverTypeEnum.ORACLE_12C, DriverTypeEnum.H2_EMBEDDED).contains(theDriverType)
+				? Integer.toString(Types.CLOB)
+				: Integer.toString(Types.VARCHAR);
+	}
+
 	private void extractAndAddToMap(ResultSet theResultSet, Map<String,String> theMap, String theColumn) throws SQLException {
 		theMap.put(theColumn, Optional.ofNullable(theResultSet.getString(theColumn))
 				.map(defaultValueNonNull -> defaultValueNonNull.equals("((-1))") ? "-1" : defaultValueNonNull) // MSSQL returns "((-1))" for default value


@@ -211,7 +211,8 @@ public class HapiMigrator {
 			if (isBlank(description)) {
 				description = theTask.getClass().getSimpleName();
 			}
-			String prefix = "Failure executing task \"" + description + "\", aborting! Cause: ";
+			String prefix = String.format(
+					"Failure executing task '%s', for driver: %s, aborting! Cause: ", description, getDriverType());
 			throw new HapiMigrationException(Msg.code(47) + prefix + e, theMigrationResult, e);
 		}
 	}
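As a quick illustration of the new message shape (hypothetical values; in the real code the driver comes from getDriverType(), and the description from the failing task):

	// Sketch only, chosen to match the expectations asserted in AddColumnTest below.
	String description = "Add column FOO_COLUMN on table FOO_TABLE";
	String driver = "H2_EMBEDDED";

	String prefix = String.format(
			"Failure executing task '%s', for driver: %s, aborting! Cause: ", description, driver);
	// -> "Failure executing task 'Add column FOO_COLUMN on table FOO_TABLE', for driver: H2_EMBEDDED, aborting! Cause: "

Msg.code(47) is still prepended when the HapiMigrationException is thrown, which is why the updated tests assert on Msg.code(47) followed by this prefix.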


@@ -1,5 +1,7 @@
 package ca.uhn.fhir.jpa.migrate.taskdef;
 
+import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
 import ca.uhn.fhir.jpa.migrate.HapiMigrationException;
 import ca.uhn.fhir.jpa.migrate.JdbcUtils;
 import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
@@ -77,7 +79,7 @@ public class AddColumnTest extends BaseTest {
 	@ParameterizedTest(name = "{index}: {0}")
 	@MethodSource("data")
 	public void testAddColumnToNonExistentTable_Failing(Supplier<TestDatabaseDetails> theTestDatabaseDetails) {
-		before(theTestDatabaseDetails);
+		final DriverTypeEnum driverType = before(theTestDatabaseDetails);
 
 		BaseMigrationTasks<VersionEnum> tasks = new BaseMigrationTasks<>();
 		tasks
@@ -92,7 +94,12 @@
 			getMigrator().migrate();
 			fail();
 		} catch (HapiMigrationException e) {
-			assertThat(e.getMessage()).startsWith("HAPI-0047: Failure executing task \"Add column FOO_COLUMN on table FOO_TABLE\", aborting! Cause: ca.uhn.fhir.jpa.migrate.HapiMigrationException: HAPI-0061: Failed during task 4.0.0.2001.01: ");
+			final String expectedError =
+				String.format("%sFailure executing task 'Add column FOO_COLUMN on table FOO_TABLE', for driver: %s, aborting! Cause: ca.uhn.fhir.jpa.migrate.HapiMigrationException: %sFailed during task 4.0.0.2001.01: ",
+					Msg.code(47),
+					driverType.name(),
+					Msg.code(61));
+			assertThat(e.getMessage()).startsWith(expectedError);
 		}
 	}


@@ -107,7 +107,7 @@ public abstract class BaseTest {
 		return retVal.stream();
 	}
 
-	public void before(Supplier<TestDatabaseDetails> theTestDatabaseDetails) {
+	public DriverTypeEnum before(Supplier<TestDatabaseDetails> theTestDatabaseDetails) {
 		TestDatabaseDetails testDatabaseDetails = theTestDatabaseDetails.get();
 		myUrl = testDatabaseDetails.myUrl;
 		myConnectionProperties = testDatabaseDetails.myConnectionProperties;
@@ -117,6 +117,8 @@
 		myHapiMigrationDao.createMigrationTableIfRequired();
 		myHapiMigrationStorageSvc = new HapiMigrationStorageSvc(myHapiMigrationDao);
+
+		return testDatabaseDetails.getDriverType();
 	}
 
 	public String getUrl() {


@@ -1,6 +1,7 @@
 package ca.uhn.fhir.jpa.migrate.taskdef;
 
 import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
 import ca.uhn.fhir.jpa.migrate.HapiMigrationException;
 import ca.uhn.fhir.jpa.migrate.JdbcUtils;
 import org.junit.jupiter.params.ParameterizedTest;
@@ -11,7 +12,6 @@ import java.util.Set;
 import java.util.function.Supplier;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.fail;
 
 public class RenameColumnTaskTest extends BaseTest {
@@ -148,7 +148,7 @@
 	@ParameterizedTest(name = "{index}: {0}")
 	@MethodSource("data")
 	public void testBothExistDeleteTargetFirstDataExistsInSourceAndTarget(Supplier<TestDatabaseDetails> theTestDatabaseDetails) {
-		before(theTestDatabaseDetails);
+		final DriverTypeEnum driverType = before(theTestDatabaseDetails);
 
 		executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255), myTextCol varchar(255))");
 		executeSql("INSERT INTO SOMETABLE (PID, TEXTCOL, myTextCol) VALUES (123, 'AAA', 'BBB')");
@@ -165,7 +165,12 @@
 			getMigrator().migrate();
 			fail();
 		} catch (HapiMigrationException e) {
-			assertEquals(Msg.code(47) + "Failure executing task \"Drop an index\", aborting! Cause: java.sql.SQLException: " + Msg.code(54) + "Can not rename SOMETABLE.myTextCol to TEXTCOL because both columns exist and data exists in TEXTCOL", e.getMessage());
+			final String expectedError =
+				String.format("%sFailure executing task 'Drop an index', for driver: %s, aborting! Cause: java.sql.SQLException: %sCan not rename SOMETABLE.myTextCol to TEXTCOL because both columns exist and data exists in TEXTCOL",
+					Msg.code(47),
+					driverType.name(),
+					Msg.code(54));
+			assertThat(e.getMessage()).isEqualTo(expectedError);
 		}
 	}
@@ -234,7 +239,7 @@
 	@ParameterizedTest(name = "{index}: {0}")
 	@MethodSource("data")
 	public void testNeitherColumnExists(Supplier<TestDatabaseDetails> theTestDatabaseDetails) {
-		before(theTestDatabaseDetails);
+		final DriverTypeEnum driverType = before(theTestDatabaseDetails);
 
 		executeSql("create table SOMETABLE (PID bigint not null)");
@@ -248,7 +253,12 @@
 			getMigrator().migrate();
 			fail();
 		} catch (HapiMigrationException e) {
-			assertEquals(Msg.code(47) + "Failure executing task \"RenameColumnTask\", aborting! Cause: java.sql.SQLException: " + Msg.code(56) + "Can not rename SOMETABLE.myTextCol to TEXTCOL because neither column exists!", e.getMessage());
+			final String expectedError =
+				String.format("%sFailure executing task 'RenameColumnTask', for driver: %s, aborting! Cause: java.sql.SQLException: %sCan not rename SOMETABLE.myTextCol to TEXTCOL because neither column exists!",
+					Msg.code(47),
+					driverType.name(),
+					Msg.code(56));
+			assertThat(e.getMessage()).isEqualTo(expectedError);
 		}
@@ -274,7 +284,7 @@
 	@ParameterizedTest(name = "{index}: {0}")
 	@MethodSource("data")
 	public void testBothColumnsExist(Supplier<TestDatabaseDetails> theTestDatabaseDetails) {
-		before(theTestDatabaseDetails);
+		final DriverTypeEnum driverType = before(theTestDatabaseDetails);
 
 		executeSql("create table SOMETABLE (PID bigint not null, PID2 bigint)");
@@ -288,7 +298,12 @@
 			getMigrator().migrate();
 			fail();
 		} catch (HapiMigrationException e) {
-			assertEquals(Msg.code(47) + "Failure executing task \"RenameColumnTask\", aborting! Cause: java.sql.SQLException: " + Msg.code(55) + "Can not rename SOMETABLE.PID to PID2 because both columns exist!", e.getMessage());
+			final String expectedError =
+				String.format("%sFailure executing task 'RenameColumnTask', for driver: %s, aborting! Cause: java.sql.SQLException: %sCan not rename SOMETABLE.PID to PID2 because both columns exist!",
+					Msg.code(47),
+					driverType.name(),
+					Msg.code(55));
+			assertThat(e.getMessage()).isEqualTo(expectedError);
 		}