Address SQL syntax issue

James Agnew 2018-11-29 08:36:59 -05:00
parent b41c222880
commit 650872cd3e
15 changed files with 144 additions and 55 deletions

View File

@@ -43,6 +43,14 @@
       <classifier>classes</classifier>
    </dependency>
+   <!--
+   <dependency>
+      <groupId>com.oracle</groupId>
+      <artifactId>ojbc8g</artifactId>
+      <version>12.2.0.1</version>
+   </dependency>
+   -->
 </dependencies>
 <build>
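Note: the Oracle JDBC dependency above is committed in a commented-out state, presumably because the driver was not available from public Maven repositories at the time and has to be installed locally by anyone who wants to run the migrator against Oracle.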

View File

@@ -50,7 +50,7 @@ public class JdbcUtils {
       DatabaseMetaData metadata;
       try {
          metadata = connection.getMetaData();
-         ResultSet indexes = metadata.getIndexInfo(null, null, theTableName, false, true);
+         ResultSet indexes = metadata.getIndexInfo(connection.getCatalog(), connection.getSchema(), theTableName, false, true);
          Set<String> indexNames = new HashSet<>();
          while (indexes.next()) {
@@ -78,7 +78,7 @@ public class JdbcUtils {
       DatabaseMetaData metadata;
       try {
          metadata = connection.getMetaData();
-         ResultSet indexes = metadata.getIndexInfo(null, null, theTableName, false, false);
+         ResultSet indexes = metadata.getIndexInfo(connection.getCatalog(), connection.getSchema(), theTableName, false, false);
          while (indexes.next()) {
             String indexName = indexes.getString("INDEX_NAME");
@@ -107,7 +107,9 @@ public class JdbcUtils {
       DatabaseMetaData metadata;
       try {
          metadata = connection.getMetaData();
-         ResultSet indexes = metadata.getColumns(null, null, null, null);
+         String catalog = connection.getCatalog();
+         String schema = connection.getSchema();
+         ResultSet indexes = metadata.getColumns(catalog, schema, theTableName, null);
          while (indexes.next()) {
@@ -158,7 +160,7 @@ public class JdbcUtils {
       DatabaseMetaData metadata;
       try {
          metadata = connection.getMetaData();
-         ResultSet indexes = metadata.getCrossReference(null, null, theTableName, null, null, theForeignTable);
+         ResultSet indexes = metadata.getCrossReference(connection.getCatalog(), connection.getSchema(), theTableName, connection.getCatalog(), connection.getSchema(), theForeignTable);
          Set<String> columnNames = new HashSet<>();
          while (indexes.next()) {
@@ -194,7 +196,7 @@ public class JdbcUtils {
       DatabaseMetaData metadata;
       try {
          metadata = connection.getMetaData();
-         ResultSet indexes = metadata.getColumns(null, null, null, null);
+         ResultSet indexes = metadata.getColumns(connection.getCatalog(), connection.getSchema(), theTableName, null);
          Set<String> columnNames = new HashSet<>();
          while (indexes.next()) {
@@ -223,7 +225,7 @@ public class JdbcUtils {
       DatabaseMetaData metadata;
       try {
          metadata = connection.getMetaData();
-         ResultSet tables = metadata.getTables(null, null, null, null);
+         ResultSet tables = metadata.getTables(connection.getCatalog(), connection.getSchema(), null, null);
          Set<String> columnNames = new HashSet<>();
          while (tables.next()) {
@@ -254,7 +256,7 @@ public class JdbcUtils {
       DatabaseMetaData metadata;
       try {
          metadata = connection.getMetaData();
-         ResultSet tables = metadata.getColumns(null, null, null, null);
+         ResultSet tables = metadata.getColumns(connection.getCatalog(), connection.getSchema(), theTableName, theColumnName);
          while (tables.next()) {
            String tableName = toUpperCase(tables.getString("TABLE_NAME"), Locale.US);
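The recurring change in this file is to stop passing null for the catalog and schema arguments of the java.sql.DatabaseMetaData lookups. With null the driver searches every schema it can see, so on a shared Oracle or SQL Server instance the metadata can describe an identically named table owned by someone else. A minimal sketch of the scoped pattern, not taken from the commit and using a hypothetical table name:

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;

public class ScopedMetadataExample {
   // Lists the columns of one table, scoped to the connection's own catalog and schema.
   static void printColumns(Connection connection, String tableName) throws SQLException {
      DatabaseMetaData metadata = connection.getMetaData();
      ResultSet columns = metadata.getColumns(
         connection.getCatalog(),   // current catalog instead of null ("all catalogs")
         connection.getSchema(),    // current schema instead of null ("all schemas"), JDBC 4.1+
         tableName,                 // e.g. "HFJ_RESOURCE" (hypothetical)
         null);                     // null = all columns of that table
      while (columns.next()) {
         System.out.println(columns.getString("COLUMN_NAME"));
      }
   }
}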

View File

@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.migrate;
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
  * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -28,6 +28,7 @@ import org.slf4j.LoggerFactory;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Objects;
 public class Migrator {
@@ -40,6 +41,7 @@ public class Migrator {
    private DriverTypeEnum.ConnectionProperties myConnectionProperties;
    private int myChangesCount;
    private boolean myDryRun;
+   private List<BaseTask.ExecutedStatement> myExecutedStatements = new ArrayList<>();
    public int getChangesCount() {
       return myChangesCount;
@@ -74,7 +76,7 @@ public class Migrator {
       myConnectionProperties = myDriverType.newConnectionProperties(myConnectionUrl, myUsername, myPassword);
       try {
-         for (BaseTask next : myTasks) {
+         for (BaseTask<?> next : myTasks) {
            next.setDriverType(myDriverType);
            next.setConnectionProperties(myConnectionProperties);
            next.setDryRun(myDryRun);
@@ -85,12 +87,33 @@ public class Migrator {
            }
            myChangesCount += next.getChangesCount();
+           myExecutedStatements.addAll(next.getExecutedStatements());
         }
      } finally {
         myConnectionProperties.close();
      }
      ourLog.info("Finished migration of {} tasks", myTasks.size());
+     if (myDryRun) {
+        StringBuilder statementBuilder = new StringBuilder();
+        String lastTable = null;
+        for (BaseTask.ExecutedStatement next : myExecutedStatements) {
+           if (!Objects.equals(lastTable, next.getTableName())) {
+              statementBuilder.append("\n\n-- Table: ").append(next.getTableName()).append("\n");
+              lastTable = next.getTableName();
+           }
+           statementBuilder.append(next.getSql()).append(";\n");
+           for (Object nextArg : next.getArguments()) {
+              statementBuilder.append(" -- Arg: ").append(nextArg).append("\n");
+           }
+        }
+        ourLog.info("SQL that would be executed:\n\n***********************************\n{}***********************************", statementBuilder);
+     }
   }
 }
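With this change the migrator collects every ExecutedStatement from its tasks and, in dry-run mode, prints the whole script grouped by table at the end of the run instead of logging statements one by one as they are skipped. A hedged usage sketch follows; myTask is a placeholder for any task, and any setter or method name not visible in this diff is an assumption inferred from the fields above, not a confirmed API:

Migrator migrator = new Migrator();
migrator.setDriverType(DriverTypeEnum.DERBY_EMBEDDED);              // any supported driver
migrator.setConnectionUrl("jdbc:derby:memory:migrate;create=true"); // placeholder URL
migrator.setUsername("SA");
migrator.setPassword("SA");
migrator.setDryRun(true);   // statements are collected rather than executed
migrator.addTask(myTask);   // any BaseTask subclass, e.g. one of the task classes below
migrator.migrate();         // finishes by logging the "SQL that would be executed" script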

View File

@@ -46,9 +46,22 @@ public class AddColumnTask extends BaseTableColumnTypeTask<AddColumnTask> {
         nullable = "";
      }
-     String sql = "alter table " + getTableName() + " add column " + getColumnName() + " " + type + " " + nullable;
+     String sql = "";
+     switch (getDriverType()) {
+        case DERBY_EMBEDDED:
+        case MARIADB_10_1:
+        case MYSQL_5_7:
+        case POSTGRES_9_4:
+           sql = "alter table " + getTableName() + " add column " + getColumnName() + " " + type + " " + nullable;
+           break;
+        case MSSQL_2012:
+        case ORACLE_12C:
+           sql = "alter table " + getTableName() + " add " + getColumnName() + " " + type + " " + nullable;
+           break;
+     }
      ourLog.info("Adding column {} of type {} to table {}", getColumnName(), type, getTableName());
-     executeSql(sql);
+     executeSql(getTableName(), sql);
   }
 }
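The reason for the new switch: Derby, MariaDB, MySQL and Postgres accept the COLUMN keyword (for example, alter table HFJ_RESOURCE add column RES_EXAMPLE varchar(100) null), whereas Oracle and SQL Server expect it to be omitted (alter table HFJ_RESOURCE add RES_EXAMPLE varchar(100) null). The table and column names here are made up for illustration; only the keyword difference matters.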

View File

@@ -83,7 +83,7 @@ public class AddForeignKeyTask extends BaseTableColumnTask<AddForeignKeyTask> {
      try {
-        executeSql(sql);
+        executeSql(getTableName(), sql);
      } catch (Exception e) {
         if (e.toString().contains("already exists")) {
            ourLog.warn("Index {} already exists", myConstraintName);

View File

@@ -67,12 +67,13 @@ public class AddIndexTask extends BaseTableTask<AddIndexTask> {
         return;
      }
-     String unique = myUnique ? "UNIQUE " : "";
+     String unique = myUnique ? "unique " : "";
      String columns = String.join(", ", myColumns);
-     String sql = "CREATE " + unique + " INDEX " + myIndexName + " ON " + getTableName() + "(" + columns + ")";
+     String sql = "create " + unique + "index " + myIndexName + " on " + getTableName() + "(" + columns + ")";
+     String tableName = getTableName();
      try {
-        executeSql(sql);
+        executeSql(tableName, sql);
      } catch (Exception e) {
         if (e.toString().contains("already exists")) {
            ourLog.warn("Index {} already exists", myIndexName);

View File

@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
  * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -38,11 +38,13 @@ public class ArbitrarySqlTask extends BaseTask<ArbitrarySqlTask> {
   private static final Logger ourLog = LoggerFactory.getLogger(ArbitrarySqlTask.class);
   private final String myDescription;
+  private final String myTableName;
   private List<Task> myTask = new ArrayList<>();
   private int myBatchSize = 1000;
   private String myExecuteOnlyIfTableExists;
-  public ArbitrarySqlTask(String theDescription) {
+  public ArbitrarySqlTask(String theTableName, String theDescription) {
+     myTableName = theTableName;
      myDescription = theDescription;
   }
@@ -104,7 +106,6 @@ public class ArbitrarySqlTask extends BaseTask<ArbitrarySqlTask> {
   @Override
   public void execute() {
      if (isDryRun()) {
-        logDryRunSql(mySql);
         return;
      }

View File

@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
  * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -28,6 +28,10 @@ import org.springframework.jdbc.core.JdbcTemplate;
 import org.springframework.transaction.support.TransactionTemplate;
 import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
 public abstract class BaseTask<T extends BaseTask> {
@@ -37,6 +41,7 @@ public abstract class BaseTask<T extends BaseTask> {
   private String myDescription;
   private int myChangesCount;
   private boolean myDryRun;
+  private List<ExecutedStatement> myExecutedStatements = new ArrayList<>();
   public boolean isDryRun() {
      return myDryRun;
@@ -56,29 +61,36 @@ public abstract class BaseTask<T extends BaseTask> {
      return (T) this;
   }
+  public List<ExecutedStatement> getExecutedStatements() {
+     return myExecutedStatements;
+  }
   public int getChangesCount() {
      return myChangesCount;
   }
-  public void executeSql(@Language("SQL") String theSql, Object... theArguments) {
-     if (isDryRun()) {
-        logDryRunSql(theSql);
-        return;
-     }
-     Integer changes = getConnectionProperties().getTxTemplate().execute(t -> {
-        JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
-        int changesCount = jdbcTemplate.update(theSql, theArguments);
-        ourLog.info("SQL \"{}\" returned {}", theSql, changesCount);
-        return changesCount;
-     });
-     myChangesCount += changes;
+  /**
+   * @param theTableName This is only used for logging currently
+   * @param theSql The SQL statement
+   * @param theArguments The SQL statement arguments
+   */
+  public void executeSql(String theTableName, @Language("SQL") String theSql, Object... theArguments) {
+     if (isDryRun() == false) {
+        Integer changes = getConnectionProperties().getTxTemplate().execute(t -> {
+           JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
+           int changesCount = jdbcTemplate.update(theSql, theArguments);
+           ourLog.info("SQL \"{}\" returned {}", theSql, changesCount);
+           return changesCount;
+        });
+        myChangesCount += changes;
+     }
+     captureExecutedStatement(theTableName, theSql, theArguments);
   }
-  protected void logDryRunSql(@Language("SQL") String theSql) {
-     ourLog.info("WOULD EXECUTE SQL: {}", theSql);
+  protected void captureExecutedStatement(String theTableName, @Language("SQL") String theSql, Object[] theArguments) {
+     myExecutedStatements.add(new ExecutedStatement(theTableName, theSql, theArguments));
   }
   public DriverTypeEnum.ConnectionProperties getConnectionProperties() {
@@ -108,4 +120,28 @@ public abstract class BaseTask<T extends BaseTask> {
   }
   public abstract void execute() throws SQLException;
+  public static class ExecutedStatement {
+     private final String mySql;
+     private final List<Object> myArguments;
+     private final String myTableName;
+     public ExecutedStatement(String theDescription, String theSql, Object[] theArguments) {
+        myTableName = theDescription;
+        mySql = theSql;
+        myArguments = theArguments != null ? Arrays.asList(theArguments) : Collections.emptyList();
+     }
+     public String getTableName() {
+        return myTableName;
+     }
+     public String getSql() {
+        return mySql;
+     }
+     public List<Object> getArguments() {
+        return myArguments;
+     }
+  }
 }
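With this change executeSql() always records an ExecutedStatement (table name, SQL, bind arguments) and only touches the database when the task is not in dry-run mode; the old logDryRunSql() path goes away. A small sketch of how the recorded statements can be read back after a dry run; the task variable stands in for any BaseTask subclass:

for (BaseTask.ExecutedStatement next : task.getExecutedStatements()) {
   System.out.println("-- Table: " + next.getTableName());
   System.out.println(next.getSql() + ";");
   for (Object arg : next.getArguments()) {
      System.out.println("  -- Arg: " + arg);
   }
}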

View File

@@ -162,7 +162,7 @@ public class CalculateHashesTask extends BaseTableColumnTask<CalculateHashesTask> {
         // Generate update SQL
         StringBuilder sqlBuilder = new StringBuilder();
-        List<Long> arguments = new ArrayList<>();
+        List<Number> arguments = new ArrayList<>();
         sqlBuilder.append("UPDATE ");
         sqlBuilder.append(getTableName());
         sqlBuilder.append(" SET ");
@@ -174,7 +174,7 @@ public class CalculateHashesTask extends BaseTableColumnTask<CalculateHashesTask> {
            arguments.add(nextNewValueEntry.getValue());
         }
         sqlBuilder.append(" WHERE SP_ID = ?");
-        arguments.add((Long) nextRow.get("SP_ID"));
+        arguments.add((Number) nextRow.get("SP_ID"));
         // Apply update SQL
         newJdbcTemnplate().update(sqlBuilder.toString(), arguments.toArray());
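The Long-to-Number widening matters because not every driver hands back a java.lang.Long for a numeric column: Oracle, for instance, typically returns java.math.BigDecimal for NUMBER columns, so the old (Long) cast could throw a ClassCastException. Accepting any Number and letting the JDBC template bind it avoids committing to one concrete type. Illustrative only:

Object raw = nextRow.get("SP_ID"); // may be Long, Integer or BigDecimal depending on the driver
Number pid = (Number) raw;         // works for all of them
long asLong = pid.longValue();     // if a primitive is ever needed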

View File

@@ -42,7 +42,7 @@ public class DropColumnTask extends BaseTableColumnTask<DropColumnTask> {
      String sql = "alter table " + getTableName() + " drop column " + getColumnName();
      ourLog.info("Dropping column {} on table {}", getColumnName(), getTableName());
-     executeSql(sql);
+     executeSql(getTableName(), sql);
   }
 }

View File

@@ -63,15 +63,15 @@ public class DropIndexTask extends BaseTableTask<DropIndexTask> {
         switch (getDriverType()) {
            case MYSQL_5_7:
            case MARIADB_10_1:
-              sql = "ALTER TABLE " + getTableName() + " DROP INDEX " + myIndexName;
+              sql = "alter table " + getTableName() + " drop index " + myIndexName;
               break;
            case DERBY_EMBEDDED:
-              sql = "DROP INDEX " + myIndexName;
+              sql = "drop index " + myIndexName;
               break;
            case POSTGRES_9_4:
            case ORACLE_12C:
            case MSSQL_2012:
-              sql = "ALTER TABLE " + getTableName() + " DROP CONSTRAINT " + myIndexName;
+              sql = "alter table " + getTableName() + " drop constraint " + myIndexName;
               break;
         }
      } else {
@@ -79,19 +79,19 @@ public class DropIndexTask extends BaseTableTask<DropIndexTask> {
         switch (getDriverType()) {
            case MYSQL_5_7:
            case MARIADB_10_1:
-              sql = "ALTER TABLE " + getTableName() + " DROP INDEX " + myIndexName;
+              sql = "alter table " + getTableName() + " drop index " + myIndexName;
               break;
            case POSTGRES_9_4:
            case DERBY_EMBEDDED:
            case ORACLE_12C:
-              sql = "DROP INDEX " + myIndexName;
+              sql = "drop index " + myIndexName;
               break;
            case MSSQL_2012:
-              sql = "DROP INDEX " + getTableName() + "." + myIndexName;
+              sql = "drop index " + getTableName() + "." + myIndexName;
               break;
         }
      }
-     executeSql(sql);
+     executeSql(getTableName(), sql);
   }

View File

@@ -95,12 +95,12 @@ public class ModifyColumnTask extends BaseTableColumnTypeTask<ModifyColumnTask> {
      ourLog.info("Updating column {} on table {} to type {}", getColumnName(), getTableName(), type);
      if (sql != null) {
-        executeSql(sql);
+        executeSql(getTableName(), sql);
      }
      if (sqlNotNull != null) {
         ourLog.info("Updating column {} on table {} to not null", getColumnName(), getTableName());
-        executeSql(sqlNotNull);
+        executeSql(getTableName(), sqlNotNull);
      }
   }

View File

@@ -322,7 +322,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
         .unique(false)
         .withColumns("HASH_PRESENCE");
-     ArbitrarySqlTask consolidateSearchParamPresenceIndexesTask = new ArbitrarySqlTask("Consolidate search parameter presence indexes");
+     ArbitrarySqlTask consolidateSearchParamPresenceIndexesTask = new ArbitrarySqlTask("HFJ_SEARCH_PARM", "Consolidate search parameter presence indexes");
      consolidateSearchParamPresenceIndexesTask.setExecuteOnlyIfTableExists("HFJ_SEARCH_PARM");
      consolidateSearchParamPresenceIndexesTask.setBatchSize(1);
@@ -338,7 +338,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
         String resType = (String) t.get("RES_TYPE");
         String paramName = (String) t.get("PARAM_NAME");
         Long hash = SearchParamPresent.calculateHashPresence(resType, paramName, present);
-        consolidateSearchParamPresenceIndexesTask.executeSql("update HFJ_RES_PARAM_PRESENT set HASH_PRESENCE = ? where PID = ?", hash, pid);
+        consolidateSearchParamPresenceIndexesTask.executeSql("HFJ_RES_PARAM_PRESENT", "update HFJ_RES_PARAM_PRESENT set HASH_PRESENCE = ? where PID = ?", hash, pid);
      });
      version.addTask(consolidateSearchParamPresenceIndexesTask);

View File

@@ -19,7 +19,7 @@ public class ArbitrarySqlTaskTest extends BaseTest {
      executeSql("insert into HFJ_RES_PARAM_PRESENT (PID, SP_ID, SP_PRESENT, HASH_PRESENT) values (100, 1, true, null)");
      executeSql("insert into HFJ_RES_PARAM_PRESENT (PID, SP_ID, SP_PRESENT, HASH_PRESENT) values (101, 2, true, null)");
-     ArbitrarySqlTask task = new ArbitrarySqlTask("Consolidate search parameter presence indexes");
+     ArbitrarySqlTask task = new ArbitrarySqlTask("HFJ_RES_PARAM_PRESENT", "Consolidate search parameter presence indexes");
      task.setExecuteOnlyIfTableExists("hfj_search_parm");
      task.setBatchSize(1);
      String sql = "SELECT " +
@@ -34,7 +34,7 @@ public class ArbitrarySqlTaskTest extends BaseTest {
         String resType = (String) t.get("RES_TYPE");
         String paramName = (String) t.get("PARAM_NAME");
         Long hash = SearchParamPresent.calculateHashPresence(resType, paramName, present);
-        task.executeSql("update HFJ_RES_PARAM_PRESENT set HASH_PRESENT = ? where PID = ?", hash, pid);
+        task.executeSql("HFJ_RES_PARAM_PRESENT", "update HFJ_RES_PARAM_PRESENT set HASH_PRESENT = ? where PID = ?", hash, pid);
      });
      getMigrator().addTask(task);
@@ -53,11 +53,11 @@ public class ArbitrarySqlTaskTest extends BaseTest {
   @Test
   public void testExecuteOnlyIfTableExists() {
-     ArbitrarySqlTask task = new ArbitrarySqlTask("Consolidate search parameter presence indexes");
+     ArbitrarySqlTask task = new ArbitrarySqlTask("HFJ_RES_PARAM_PRESENT", "Consolidate search parameter presence indexes");
      task.setBatchSize(1);
      String sql = "SELECT * FROM HFJ_SEARCH_PARM";
      task.addQuery(sql, ArbitrarySqlTask.QueryModeEnum.BATCH_UNTIL_NO_MORE, t -> {
-        task.executeSql("update HFJ_RES_PARAM_PRESENT set FOOFOOOFOO = null");
+        task.executeSql("HFJ_RES_PARAM_PRESENT", "update HFJ_RES_PARAM_PRESENT set FOOFOOOFOO = null");
      });
      task.setExecuteOnlyIfTableExists("hfj_search_parm");

View File

@@ -97,6 +97,11 @@
         including support for either allowing the operation response to proceed unchallenged,
         or authorizing the contents of the response.
      </action>
+     <action type="add">
+        An invalid SQL syntax issue has been fixed when running the CLI JPA Migrator tool against
+        Oracle or SQL Server. In addition, when using the "Dry Run" option, all generated SQL
+        statements will be logged at the end of the run.
+     </action>
   </release>
   <release version="3.6.0" date="2018-11-12" description="Food">
      <action type="add">