Allow constraints to be deleted by the migrator on H2 (#2002)
parent a35c97e13d
commit 06fd306898
DropIndexTask.java

@@ -28,11 +28,16 @@ import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapperResultSetExtractor;
import org.springframework.jdbc.core.SingleColumnRowMapper;

import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;

public class DropIndexTask extends BaseTableTask {
@@ -54,6 +59,52 @@ public class DropIndexTask extends BaseTableTask {

	@Override
	public void doExecute() throws SQLException {
		/*
		 * Derby and H2 both behave a bit weirdly if you create a unique constraint
		 * using the @UniqueConstraint annotation in Hibernate - they will create a
		 * constraint with that name, but will then create a shadow index with a
		 * different name, and it is that different name that gets reported when you
		 * query for the list of indexes.
		 *
		 * For example, on H2 if you create a constraint named "IDX_FOO", the system
		 * will create an index named "IDX_FOO_INDEX_A" and a constraint named "IDX_FOO".
		 *
		 * The following uses the appropriate native queries to detect, on those
		 * platforms, whether an index name actually corresponds to a constraint,
		 * and drops that constraint instead.
		 */

		@Language("SQL") String findConstraintSql;
		@Language("SQL") String dropConstraintSql;
		if (getDriverType() == DriverTypeEnum.H2_EMBEDDED) {
			findConstraintSql = "SELECT DISTINCT INDEX_NAME FROM INFORMATION_SCHEMA.INDEXES WHERE constraint_name = ? AND table_name = ?";
			dropConstraintSql = "ALTER TABLE " + getTableName() + " DROP CONSTRAINT " + myIndexName;
		} else if (getDriverType() == DriverTypeEnum.DERBY_EMBEDDED) {
			findConstraintSql = "SELECT c.constraintname FROM sys.sysconstraints c, sys.systables t WHERE c.tableid = t.tableid AND c.constraintname = ? AND t.tablename = ?";
			dropConstraintSql = "ALTER TABLE " + getTableName() + " DROP CONSTRAINT " + myIndexName;
		} else {
			findConstraintSql = null;
			dropConstraintSql = null;
		}

		if (findConstraintSql != null) {
			DataSource dataSource = Objects.requireNonNull(getConnectionProperties().getDataSource());
			Boolean handled = getConnectionProperties().getTxTemplate().execute(t -> {
				JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
				RowMapperResultSetExtractor<String> resultSetExtractor = new RowMapperResultSetExtractor<>(new SingleColumnRowMapper<>(String.class));
				List<String> outcome = jdbcTemplate.query(findConstraintSql, new Object[]{myIndexName, getTableName()}, resultSetExtractor);
				assert outcome != null;
				if (outcome.size() > 0) {
					executeSql(getTableName(), dropConstraintSql);
					return true;
				}
				return false;
			});
			if (Boolean.TRUE.equals(handled)) {
				return;
			}
		}

		Set<String> indexNames = JdbcUtils.getIndexNames(getConnectionProperties(), getTableName());

		if (!indexNames.contains(myIndexName)) {
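The shadow-index behaviour described in the comment above is easy to observe outside the migrator. Below is a minimal standalone sketch (not part of this commit) against an in-memory H2 database; it assumes the H2 driver is on the classpath and an H2 version whose INFORMATION_SCHEMA.INDEXES view exposes CONSTRAINT_NAME, as the new findConstraintSql query does. Class, database, and constraint names are illustrative only.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Create a named unique constraint on H2 and list what INFORMATION_SCHEMA
// reports: INDEX_NAME is the generated shadow index, while CONSTRAINT_NAME
// is the name we actually gave.
public class H2ShadowIndexDemo {
	public static void main(String[] args) throws Exception {
		try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:shadowdemo");
			 Statement stmt = conn.createStatement()) {

			stmt.execute("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
			stmt.execute("alter table SOMETABLE add constraint IDX_FOO unique (TEXTCOL)");

			// Same view and columns that DropIndexTask's H2 branch queries
			try (ResultSet rs = stmt.executeQuery(
					"SELECT INDEX_NAME, CONSTRAINT_NAME FROM INFORMATION_SCHEMA.INDEXES WHERE TABLE_NAME = 'SOMETABLE'")) {
				while (rs.next()) {
					// Expect something like: IDX_FOO_INDEX_A -> IDX_FOO
					System.out.println(rs.getString("INDEX_NAME") + " -> " + rs.getString("CONSTRAINT_NAME"));
				}
			}

			// Dropping the constraint (rather than the shadow index) is what the
			// new code path does when the lookup above returns a row.
			stmt.execute("alter table SOMETABLE drop constraint IDX_FOO");
		}
	}
}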
HapiFhirJpaMigrationTasks.java

@@ -136,13 +136,20 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
		empiLink.addColumn("20200715.3", "NEW_PERSON").nullable().type(ColumnTypeEnum.BOOLEAN);
		empiLink.addColumn("20200715.4", "VECTOR").nullable().type(ColumnTypeEnum.LONG);
		empiLink.addColumn("20200715.5", "SCORE").nullable().type(ColumnTypeEnum.FLOAT);

		init510_20200725();
	}

	protected void init510_20200725() {
		// nothing
	}

	protected void init510_20200610() {
		// nothing
	}

	protected void init510_20200706_to_20200714() {
		// nothing
	}

	private void init501() { //20200514 - present
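The newly extracted init510_20200725() hook above is protected and intentionally empty, so a downstream distribution can override it to append its own tasks for the same release without touching the upstream list. The following is a hypothetical sketch (not part of this commit); the builder calls (forVersion, onTable, dropIndex) mirror the pattern used elsewhere in HapiFhirJpaMigrationTasks but should be treated as assumptions here, as should the package, table, and index names.

package com.example.migrate;

import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;

import java.util.Set;

// Hypothetical downstream subclass: registers one extra 5.1.0 task by
// overriding the new hook. Dropping the index now also removes a backing
// unique constraint on H2/Derby thanks to the DropIndexTask change above.
public class MyDistributionMigrationTasks extends HapiFhirJpaMigrationTasks {

	public MyDistributionMigrationTasks(Set<String> theFlags) {
		super(theFlags);
	}

	@Override
	protected void init510_20200725() {
		Builder version = forVersion(VersionEnum.V5_1_0);
		// Table and index names are illustrative only
		version.onTable("MY_CUSTOM_TABLE")
			.dropIndex("20200725.10", "IDX_MY_CUSTOM_UNIQUE");
	}
}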
DropIndexTest.java

@@ -1,6 +1,7 @@
package ca.uhn.fhir.jpa.migrate.taskdef;

import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.hamcrest.Matchers;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

@@ -10,6 +11,8 @@ import java.util.function.Supplier;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.not;

public class DropIndexTest extends BaseTest {

@@ -93,4 +96,23 @@ public class DropIndexTest extends BaseTest {
		assertThat(JdbcUtils.getIndexNames(getConnectionProperties(), "SOMETABLE"), contains("IDX_DIFINDEX"));
	}

	@ParameterizedTest(name = "{index}: {0}")
	@MethodSource("data")
	public void testDropConstraintIndex(Supplier<TestDatabaseDetails> theTestDatabaseDetails) throws SQLException {
		before(theTestDatabaseDetails);

		executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255), TEXTCOL2 varchar(255))");
		executeSql("alter table SOMETABLE add constraint IDX_DIFINDEX unique (TEXTCOL, TEXTCOL2)");

		DropIndexTask task = new DropIndexTask("1", "1");
		task.setDescription("Drop an index");
		task.setIndexName("IDX_DIFINDEX");
		task.setTableName("SOMETABLE");
		getMigrator().addTask(task);

		assertThat(JdbcUtils.getIndexNames(getConnectionProperties(), "SOMETABLE"), not(empty()));
		getMigrator().migrate();
		assertThat(JdbcUtils.getIndexNames(getConnectionProperties(), "SOMETABLE"), empty());
	}

}