Migration: Add new indexes to hfj_spidx_string and hfj_spidx_uri if collation is NOT 'C' (#5544)

* First commit: conditional logic for executing migration tasks, along with logic to create new indexes on hfj_spidx_string and hfj_spidx_uri if sp_value_normalized or sp_uri, respectively, has a collation other than "C".

* More fixes.  Still TODOs to resolve.

* Add changelog.  Clean up TODOs.  New message code for precondition SQL checking.  javadoc.  Unit test enhancements.

* Spotless.  Small tweaks.  Fix conditional logic for more than one precondition check.

* Add details to upgrade document.

* Code review feedback.

* Spotless
Luke deGruchy 2023-12-14 16:04:15 -05:00 committed by GitHub
parent 9f81fa334c
commit c4ac940e14
9 changed files with 430 additions and 43 deletions


@ -0,0 +1,7 @@
---
type: fix
issue: 5547
title: "Previously, LIKE queries against resources would perform poorly on PostgreSQL if the database locale/collation was not 'C'.
This has been resolved by checking the collations of the hfj_spidx_string.sp_value_normalized and hfj_spidx_uri.sp_uri
columns during migration and, if either or both are not 'C', creating new btree indexes that use varchar_pattern_ops
on those columns. If both column collations are 'C', no new indexes are created."


@ -1 +1,18 @@
This release contains a large breaking change for authors of interceptors. Internally, HAPI-FHIR has swapped from using `javax.*` to `jakarta.*` packages. Please see [the migration guide](/hapi-fhir/docs/interceptors/jakarta_upgrade.html) for more information. Without manual intervention, the majority of interceptors will fail at runtime unless they are upgraded.
## Possible New Indexes on PostgreSQL
* This affects only clients running PostgreSQL whose database locale/collation is NOT 'C'
* For those clients, the migration will detect this condition and add new indexes to:
* hfj_spidx_string
* hfj_spidx_uri
* This is meant to address performance issues for these clients on GET queries whose resulting SQL uses "LIKE" clauses
These are the new indexes that will be created:
```sql
CREATE INDEX idx_sp_string_hash_nrm_pattern_ops ON public.hfj_spidx_string USING btree (hash_norm_prefix, sp_value_normalized varchar_pattern_ops, res_id, partition_id);
```
```sql
CREATE UNIQUE INDEX idx_sp_uri_hash_identity_pattern_ops ON public.hfj_spidx_uri USING btree (hash_identity, sp_uri varchar_pattern_ops, res_id, partition_id);
```
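If you want to confirm ahead of time whether this applies to your system, a minimal sketch along these lines can report the database's default collation (the connection URL and credentials are placeholders; it assumes the PostgreSQL JDBC driver is on the classpath):
```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Standalone check, not part of the migration itself.
public class CollationCheck {
    public static void main(String[] args) throws Exception {
        // Placeholder URL/credentials: point these at the HAPI FHIR database.
        String url = "jdbc:postgresql://localhost:5432/hapi";
        try (Connection conn = DriverManager.getConnection(url, "admin", "admin");
                Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery(
                        "SELECT datcollate FROM pg_database WHERE datname = current_database()")) {
            if (rs.next()) {
                // If this is not 'C' (and the columns use the default collation),
                // the migration will create the new indexes.
                System.out.println("Database default collation: " + rs.getString(1));
            }
        }
    }
}
```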


@ -62,6 +62,28 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// H2, Derby, MariaDB, and MySql automatically add indexes to foreign keys
public static final DriverTypeEnum[] NON_AUTOMATIC_FK_INDEX_PLATFORMS =
new DriverTypeEnum[] {DriverTypeEnum.POSTGRES_9_4, DriverTypeEnum.ORACLE_12C, DriverTypeEnum.MSSQL_2012};
private static final String QUERY_FOR_COLUMN_COLLATION_TEMPLATE = "WITH defcoll AS (\n"
+ " SELECT datcollate AS coll\n"
+ " FROM pg_database\n"
+ " WHERE datname = current_database())\n"
+ ", collation_by_column AS (\n"
+ " SELECT a.attname,\n"
+ " CASE WHEN c.collname = 'default'\n"
+ " THEN defcoll.coll\n"
+ " ELSE c.collname\n"
+ " END AS my_collation\n"
+ " FROM pg_attribute AS a\n"
+ " CROSS JOIN defcoll\n"
+ " LEFT JOIN pg_collation AS c ON a.attcollation = c.oid\n"
+ " WHERE a.attrelid = '%s'::regclass\n"
+ " AND a.attnum > 0\n"
+ " AND attname = '%s'\n"
+ ")\n"
+ "SELECT TRUE as result\n"
+ "FROM collation_by_column\n"
+ "WHERE EXISTS (SELECT 1\n"
+ " FROM collation_by_column\n"
+ " WHERE my_collation != 'C')";
private final Set<FlagEnum> myFlags;
/**
@ -141,6 +163,30 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
batch2JobInstanceTable.addColumn("20231128.1", "USER_NAME").nullable().type(ColumnTypeEnum.STRING, 200);
batch2JobInstanceTable.addColumn("20231128.2", "CLIENT_ID").nullable().type(ColumnTypeEnum.STRING, 200);
{
version.executeRawSql(
"20231212.1",
"CREATE INDEX idx_sp_string_hash_nrm_pattern_ops ON public.hfj_spidx_string USING btree (hash_norm_prefix, sp_value_normalized varchar_pattern_ops, res_id, partition_id)")
.onlyAppliesToPlatforms(DriverTypeEnum.POSTGRES_9_4)
.onlyIf(
String.format(
QUERY_FOR_COLUMN_COLLATION_TEMPLATE,
"HFJ_SPIDX_STRING".toLowerCase(),
"SP_VALUE_NORMALIZED".toLowerCase()),
"Column HFJ_SPIDX_STRING.SP_VALUE_NORMALIZED already has a collation of 'C' so doing nothing");
version.executeRawSql(
"20231212.2",
"CREATE UNIQUE INDEX idx_sp_uri_hash_identity_pattern_ops ON public.hfj_spidx_uri USING btree (hash_identity, sp_uri varchar_pattern_ops, res_id, partition_id)")
.onlyAppliesToPlatforms(DriverTypeEnum.POSTGRES_9_4)
.onlyIf(
String.format(
QUERY_FOR_COLUMN_COLLATION_TEMPLATE,
"HFJ_SPIDX_URI".toLowerCase(),
"SP_URI".toLowerCase()),
"Column HFJ_SPIDX_URI.SP_URI already has a collation of 'C' so doing nothing");
}
}
protected void init680() {


@ -0,0 +1,66 @@
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.i18n.Msg;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import java.util.List;
import java.util.Optional;
/**
* Utility methods to be used by migrator functionality that needs to invoke JDBC directly.
*/
public class MigrationJdbcUtils {
private static final Logger ourLog = LoggerFactory.getLogger(MigrationJdbcUtils.class);
public static boolean queryForSingleBooleanResultMultipleThrowsException(
String theSql, JdbcTemplate theJdbcTemplate) {
final RowMapper<Boolean> booleanRowMapper = (theResultSet, theRowNumber) -> theResultSet.getBoolean(1);
return queryForSingle(theSql, theJdbcTemplate, booleanRowMapper).orElse(false);
}
private static <T> Optional<T> queryForSingle(
String theSql, JdbcTemplate theJdbcTemplate, RowMapper<T> theRowMapper) {
final List<T> results = queryForMultiple(theSql, theJdbcTemplate, theRowMapper);
if (results.isEmpty()) {
return Optional.empty();
}
if (results.size() > 1) {
// Presumably other callers may want different behaviour but in this case more than one result should be
// considered a hard failure distinct from an empty result, which is one expected outcome.
throw new IllegalArgumentException(Msg.code(2474)
+ String.format(
"Failure due to query returning more than one result: %s for SQL: [%s].", results, theSql));
}
return Optional.ofNullable(results.get(0));
}
private static <T> List<T> queryForMultiple(
String theSql, JdbcTemplate theJdbcTemplate, RowMapper<T> theRowMapper) {
return theJdbcTemplate.query(theSql, theRowMapper);
}
}
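For context, a minimal usage sketch of the new utility (the wrapper class, DataSource and SQL below are illustrative, not part of this change):
```java
import ca.uhn.fhir.jpa.migrate.MigrationJdbcUtils;
import org.springframework.jdbc.core.JdbcTemplate;

import javax.sql.DataSource;

public class CollationPreconditionSketch {
    // Returns true when the current PostgreSQL database's default collation is not 'C'.
    // The utility returns false when the query yields no rows, and throws
    // IllegalArgumentException (HAPI-2474) if it yields more than one row.
    static boolean hasNonCDefaultCollation(DataSource theDataSource) {
        JdbcTemplate jdbcTemplate = new JdbcTemplate(theDataSource);
        return MigrationJdbcUtils.queryForSingleBooleanResultMultipleThrowsException(
                "SELECT TRUE FROM pg_database WHERE datname = current_database() AND datcollate != 'C'",
                jdbcTemplate);
    }
}
```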


@ -250,6 +250,8 @@ public abstract class BaseTask {
return getConnectionProperties().newJdbcTemplate();
}
private final List<ExecuteTaskPrecondition> myPreconditions = new ArrayList<>();
public void execute() throws SQLException {
if (myDoNothing) {
ourLog.info("Skipping stubbed task: {}", getDescription());
@ -257,7 +259,17 @@ public abstract class BaseTask {
}
if (!myOnlyAppliesToPlatforms.isEmpty()) {
if (!myOnlyAppliesToPlatforms.contains(getDriverType())) {
ourLog.debug("Skipping task {} as it does not apply to {}", getDescription(), getDriverType());
ourLog.info("Skipping task {} as it does not apply to {}", getDescription(), getDriverType());
return;
}
}
for (ExecuteTaskPrecondition precondition : myPreconditions) {
ourLog.debug("precondition to evaluate: {}", precondition);
if (!precondition.getPreconditionRunner().get()) {
ourLog.info(
"Skipping task since one of the preconditions was not met: {}",
precondition.getPreconditionReason());
return;
}
}
@ -305,6 +317,10 @@ public abstract class BaseTask {
return this;
}
public void addPrecondition(ExecuteTaskPrecondition thePrecondition) {
myPreconditions.add(thePrecondition);
}
@Override
public final int hashCode() {
HashCodeBuilder builder = new HashCodeBuilder();


@ -0,0 +1,74 @@
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.migrate.taskdef;
import java.util.Objects;
import java.util.StringJoiner;
import java.util.function.Supplier;
/**
* Contains a pre-built precondition to evaluate once {@link BaseTask#execute()} is called.
* <p/>
* Includes both a {@link Supplier} containing the logic that determines whether the precondition evaluates to true or false, and
* a reason String that is logged if the precondition evaluates to false and halts execution of the task.
*/
public class ExecuteTaskPrecondition {
private final Supplier<Boolean> myPreconditionRunner;
private final String myPreconditionReason;
public ExecuteTaskPrecondition(Supplier<Boolean> thePreconditionRunner, String thePreconditionReason) {
myPreconditionRunner = thePreconditionRunner;
myPreconditionReason = thePreconditionReason;
}
public Supplier<Boolean> getPreconditionRunner() {
return myPreconditionRunner;
}
public String getPreconditionReason() {
return myPreconditionReason;
}
@Override
public boolean equals(Object theO) {
if (this == theO) {
return true;
}
if (theO == null || getClass() != theO.getClass()) {
return false;
}
ExecuteTaskPrecondition that = (ExecuteTaskPrecondition) theO;
return Objects.equals(myPreconditionRunner, that.myPreconditionRunner)
&& Objects.equals(myPreconditionReason, that.myPreconditionReason);
}
@Override
public int hashCode() {
return Objects.hash(myPreconditionRunner, myPreconditionReason);
}
@Override
public String toString() {
return new StringJoiner(", ", ExecuteTaskPrecondition.class.getSimpleName() + "[", "]")
.add("myPreconditionRunner=" + myPreconditionRunner)
.add("myPreconditionReason='" + myPreconditionReason + "'")
.toString();
}
}
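A short sketch of how a precondition can be constructed (the system property used here is hypothetical; in this change the Supplier is built by Builder.onlyIf() from a SQL query):
```java
import ca.uhn.fhir.jpa.migrate.taskdef.ExecuteTaskPrecondition;

public class PreconditionSketch {
    // The Supplier carries the go/no-go logic; the reason String is what gets logged
    // when the Supplier returns false and the enclosing task is skipped.
    static ExecuteTaskPrecondition buildExample() {
        return new ExecuteTaskPrecondition(
                () -> Boolean.getBoolean("run.optional.migration"), // hypothetical flag
                "System property run.optional.migration is not set, so skipping the task");
    }
}
```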


@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.migrate.tasks.api;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.MigrationJdbcUtils;
import ca.uhn.fhir.jpa.migrate.taskdef.AddColumnTask;
import ca.uhn.fhir.jpa.migrate.taskdef.AddForeignKeyTask;
import ca.uhn.fhir.jpa.migrate.taskdef.AddIdGeneratorTask;
@ -36,6 +37,7 @@ import ca.uhn.fhir.jpa.migrate.taskdef.DropIdGeneratorTask;
import ca.uhn.fhir.jpa.migrate.taskdef.DropIndexTask;
import ca.uhn.fhir.jpa.migrate.taskdef.DropTableTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ExecuteRawSqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ExecuteTaskPrecondition;
import ca.uhn.fhir.jpa.migrate.taskdef.InitializeSchemaTask;
import ca.uhn.fhir.jpa.migrate.taskdef.MigratePostgresTextClobToBinaryClobTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ModifyColumnTask;
@ -44,6 +46,8 @@ import ca.uhn.fhir.jpa.migrate.taskdef.RenameColumnTask;
import ca.uhn.fhir.jpa.migrate.taskdef.RenameIndexTask;
import org.apache.commons.lang3.Validate;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.Collections;
@ -54,6 +58,7 @@ import java.util.Set;
import java.util.stream.Collectors;
public class Builder {
private static final Logger ourLog = LoggerFactory.getLogger(Builder.class);
private final String myRelease;
private final BaseMigrationTasks.IAcceptsTasks mySink;
@ -571,6 +576,40 @@ public class Builder {
return this;
}
/**
* Introduce precondition checking logic into the execution of the enclosed task. This conditional logic will
* be implemented by running an SQL SELECT (including CTEs) to obtain a boolean indicating whether a certain
* condition has been met.
* One example is to check for a specific collation on a column to decide whether to create a new index.
* <p/>
* This method may be called multiple times to add multiple preconditions. Any precondition that evaluates to
* false will stop execution of the task, irrespective of any or all other preconditions evaluating to true.
*
* @param theSql The SELECT statement (optionally a CTE) used to determine whether the precondition is met.
* @param reason The text that is logged if the precondition is not met.
* @return The BuilderCompleteTask in order to chain further method calls on this builder.
*/
public BuilderCompleteTask onlyIf(@Language("SQL") String theSql, String reason) {
if (!theSql.toUpperCase().startsWith("WITH")
&& !theSql.toUpperCase().startsWith("SELECT")) {
throw new IllegalArgumentException(Msg.code(2455)
+ String.format(
"Only SELECT statements (including CTEs) are allowed here. Please check your SQL: [%s]",
theSql));
}
ourLog.info("SQL to evaluate: {}", theSql);
myTask.addPrecondition(new ExecuteTaskPrecondition(
() -> {
ourLog.info("Checking precondition for SQL: {}", theSql);
return MigrationJdbcUtils.queryForSingleBooleanResultMultipleThrowsException(
theSql, myTask.newJdbcTemplate());
},
reason));
return this;
}
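A usage sketch of onlyIf() chaining, per the Javadoc above (illustrative, not part of the diff; the version ID, table, index and precondition SQL are hypothetical; both preconditions must evaluate to true, and the platform must match, for the raw SQL to run):
```java
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;

public class OnlyIfChainingSketch {
    static void addConditionalIndex(Builder theVersion) {
        theVersion
                .executeRawSql(
                        "20240101.1",
                        "CREATE INDEX idx_example ON some_table (some_column varchar_pattern_ops)")
                .onlyAppliesToPlatforms(DriverTypeEnum.POSTGRES_9_4)
                .onlyIf(
                        "SELECT TRUE FROM pg_database WHERE datname = current_database() AND datcollate != 'C'",
                        "Database collation is 'C' so the extra index is not needed")
                .onlyIf(
                        "SELECT NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_example')",
                        "Index idx_example already exists");
    }
}
```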
public BuilderCompleteTask runEvenDuringSchemaInitialization() {
myTask.setRunDuringSchemaInitialization(true);
return this;


@ -7,8 +7,10 @@ import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import org.apache.commons.dbcp2.BasicDataSource;
import org.h2.Driver;
import org.intellij.lang.annotations.Language;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.params.provider.Arguments;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.ColumnMapRowMapper;
@ -25,8 +27,53 @@ import java.util.stream.Stream;
public abstract class BaseTest {
private static final String DATABASE_NAME = "DATABASE";
static final String H2 = "H2";
static final String DERBY = "Derby";
private static final Logger ourLog = LoggerFactory.getLogger(BaseTest.class);
private static int ourDatabaseUrl = 0;
private static final Supplier<TestDatabaseDetails> TEST_DATABASE_DETAILS_DERBY_SUPPLIER = new Supplier<>() {
@Override
public TestDatabaseDetails get() {
ourLog.info("Derby: {}", DriverTypeEnum.DERBY_EMBEDDED.getDriverClassName());
String url = "jdbc:derby:memory:" + DATABASE_NAME + ourDatabaseUrl++ + ";create=true";
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(url, "SA", "SA");
BasicDataSource dataSource = new BasicDataSource();
dataSource.setUrl(url);
dataSource.setUsername("SA");
dataSource.setPassword("SA");
dataSource.setDriverClassName(DriverTypeEnum.DERBY_EMBEDDED.getDriverClassName());
HapiMigrator migrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.DERBY_EMBEDDED);
return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
}
@Override
public String toString() {
return DERBY;
}
};
private static final Supplier<TestDatabaseDetails> TEST_DATABASE_DETAILS_H2_SUPPLIER = new Supplier<>() {
@Override
public TestDatabaseDetails get() {
ourLog.info("H2: {}", Driver.class);
String url = "jdbc:h2:mem:" + DATABASE_NAME + ourDatabaseUrl++;
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "SA", "SA");
BasicDataSource dataSource = new BasicDataSource();
dataSource.setUrl(url);
dataSource.setUsername("SA");
dataSource.setPassword("SA");
dataSource.setDriverClassName(DriverTypeEnum.H2_EMBEDDED.getDriverClassName());
HapiMigrator migrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.H2_EMBEDDED);
return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
}
@Override
public String toString() {
return H2;
}
};
private BasicDataSource myDataSource;
private String myUrl;
private HapiMigrator myMigrator;
@ -34,54 +81,28 @@ public abstract class BaseTest {
protected HapiMigrationDao myHapiMigrationDao;
protected HapiMigrationStorageSvc myHapiMigrationStorageSvc;
public static Stream<Arguments> dataWithEvaluationResults() {
return Stream.of(
Arguments.of(TEST_DATABASE_DETAILS_H2_SUPPLIER, List.of(true, true), true),
Arguments.of(TEST_DATABASE_DETAILS_H2_SUPPLIER, List.of(false, true), false),
Arguments.of(TEST_DATABASE_DETAILS_H2_SUPPLIER, List.of(true, false), false),
Arguments.of(TEST_DATABASE_DETAILS_H2_SUPPLIER, List.of(false, false), false),
Arguments.of(TEST_DATABASE_DETAILS_DERBY_SUPPLIER, List.of(true, true), true),
Arguments.of(TEST_DATABASE_DETAILS_DERBY_SUPPLIER, List.of(false, true), false),
Arguments.of(TEST_DATABASE_DETAILS_DERBY_SUPPLIER, List.of(true, false), false),
Arguments.of(TEST_DATABASE_DETAILS_DERBY_SUPPLIER, List.of(false, false), false)
);
}
public static Stream<Supplier<TestDatabaseDetails>> data() {
ArrayList<Supplier<TestDatabaseDetails>> retVal = new ArrayList<>();
// H2
retVal.add(new Supplier<TestDatabaseDetails>() {
@Override
public TestDatabaseDetails get() {
ourLog.info("H2: {}", org.h2.Driver.class.toString());
String url = "jdbc:h2:mem:" + DATABASE_NAME + ourDatabaseUrl++;
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "SA", "SA");
BasicDataSource dataSource = new BasicDataSource();
dataSource.setUrl(url);
dataSource.setUsername("SA");
dataSource.setPassword("SA");
dataSource.setDriverClassName(DriverTypeEnum.H2_EMBEDDED.getDriverClassName());
HapiMigrator migrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.H2_EMBEDDED);
return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
}
@Override
public String toString() {
return "H2";
}
});
retVal.add(TEST_DATABASE_DETAILS_H2_SUPPLIER);
// Derby
retVal.add(new Supplier<TestDatabaseDetails>() {
@Override
public TestDatabaseDetails get() {
ourLog.info("Derby: {}", DriverTypeEnum.DERBY_EMBEDDED.getDriverClassName());
String url = "jdbc:derby:memory:" + DATABASE_NAME + ourDatabaseUrl++ + ";create=true";
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(url, "SA", "SA");
BasicDataSource dataSource = new BasicDataSource();
dataSource.setUrl(url);
dataSource.setUsername("SA");
dataSource.setPassword("SA");
dataSource.setDriverClassName(DriverTypeEnum.DERBY_EMBEDDED.getDriverClassName());
HapiMigrator migrator = new HapiMigrator(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME, dataSource, DriverTypeEnum.DERBY_EMBEDDED);
return new TestDatabaseDetails(url, connectionProperties, dataSource, migrator);
}
@Override
public String toString() {
return "Derby";
}
});
retVal.add(TEST_DATABASE_DETAILS_DERBY_SUPPLIER);
return retVal.stream();
}


@ -2,9 +2,12 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
import ca.uhn.fhir.util.VersionEnum;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.List;
@ -12,9 +15,11 @@ import java.util.Map;
import java.util.function.Supplier;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
public class ExecuteRawSqlTaskTest extends BaseTest {
private static final Logger ourLog = LoggerFactory.getLogger(ExecuteRawSqlTaskTest.class);
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("data")
@ -135,4 +140,100 @@ public class ExecuteRawSqlTaskTest extends BaseTest {
assertEquals(0, output.size());
}
@ParameterizedTest()
@MethodSource("dataWithEvaluationResults")
public void testExecuteRawSqlTaskWithPrecondition(Supplier<TestDatabaseDetails> theTestDatabaseDetails, List<Boolean> thePreconditionOutcomes, boolean theIsExecutionExpected) {
before(theTestDatabaseDetails);
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
final List<Map<String, Object>> outputPreMigrate = executeQuery("SELECT PID,TEXTCOL FROM SOMETABLE");
assertTrue(outputPreMigrate.isEmpty());
final String someFakeUpdateSql = "INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (123, 'abc')";
final String someReason = "I dont feel like it!";
final BaseMigrationTasks<VersionEnum> tasks = new BaseMigrationTasks<>();
final Builder.BuilderCompleteTask builderCompleteTask = tasks.forVersion(VersionEnum.V4_0_0)
.executeRawSql("2024.02", someFakeUpdateSql);
for (boolean preconditionOutcome: thePreconditionOutcomes) {
final String someFakeSelectSql =
String.format("SELECT %s %s", preconditionOutcome,
(BaseTest.DERBY.equals(theTestDatabaseDetails.toString())) ? "FROM SYSIBM.SYSDUMMY1" : "");
builderCompleteTask.onlyIf(someFakeSelectSql, someReason);
}
getMigrator().addTasks(tasks.getTaskList(VersionEnum.V0_1, VersionEnum.V4_0_0));
getMigrator().migrate();
final List<Map<String, Object>> outputPostMigrate = executeQuery("SELECT PID,TEXTCOL FROM SOMETABLE");
if (theIsExecutionExpected) {
assertEquals(1, outputPostMigrate.size());
assertEquals(123L, outputPostMigrate.get(0).get("PID"));
assertEquals("abc", outputPostMigrate.get(0).get("TEXTCOL"));
} else {
assertTrue(outputPostMigrate.isEmpty());
}
}
@ParameterizedTest()
@MethodSource("data")
public void testExecuteRawSqlTaskWithPreconditionInvalidPreconditionSql(Supplier<TestDatabaseDetails> theTestDatabaseDetails) {
before(theTestDatabaseDetails);
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
final List<Map<String, Object>> outputPreMigrate = executeQuery("SELECT PID,TEXTCOL FROM SOMETABLE");
assertTrue(outputPreMigrate.isEmpty());
final String someFakeUpdateSql = "INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (123, 'abc')";
final String someFakeSelectSql = "UPDATE SOMETABLE SET PID = 1";
final String someReason = "I dont feel like it!";
try {
final BaseMigrationTasks<VersionEnum> tasks = new BaseMigrationTasks<>();
tasks.forVersion(VersionEnum.V4_0_0)
.executeRawSql("2024.02", someFakeUpdateSql)
.onlyIf(someFakeSelectSql, someReason);
fail();
} catch (IllegalArgumentException exception) {
assertEquals("HAPI-2455: Only SELECT statements (including CTEs) are allowed here. Please check your SQL: [UPDATE SOMETABLE SET PID = 1]", exception.getMessage());
}
}
@ParameterizedTest()
@MethodSource("data")
public void testExecuteRawSqlTaskWithPreconditionPreconditionSqlReturnsMultiple(Supplier<TestDatabaseDetails> theTestDatabaseDetails) {
before(theTestDatabaseDetails);
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
executeSql("INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (123, 'abc')");
executeSql("INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (456, 'def')");
final List<Map<String, Object>> outputPreMigrate = executeQuery("SELECT PID,TEXTCOL FROM SOMETABLE");
assertEquals(2, outputPreMigrate.size());
final String someFakeUpdateSql = "INSERT INTO SOMETABLE (PID, TEXTCOL) VALUES (789, 'xyz')";
final String someFakeSelectSql = "SELECT PID != 0 FROM SOMETABLE";
final String someReason = "I dont feel like it!";
final BaseMigrationTasks<VersionEnum> tasks = new BaseMigrationTasks<>();
final Builder.BuilderCompleteTask builderCompleteTask = tasks.forVersion(VersionEnum.V4_0_0)
.executeRawSql("2024.02", someFakeUpdateSql);
builderCompleteTask.onlyIf(someFakeSelectSql, someReason);
getMigrator().addTasks(tasks.getTaskList(VersionEnum.V0_1, VersionEnum.V4_0_0));
try {
getMigrator().migrate();
fail();
} catch (IllegalArgumentException exception) {
assertEquals("HAPI-2474: Failure due to query returning more than one result: [true, true] for SQL: [SELECT PID != 0 FROM SOMETABLE].", exception.getMessage());
}
}
}