Add drop table task to migrator
parent 5bf4fa22e7
commit cf68efb826
@@ -79,9 +79,6 @@
 	<build>

 		<!-- Tells Maven to name the generated WAR file as hapi-fhir-jpaserver-example.war -->
 		<finalName>hapi-fhir-jpaserver-example</finalName>

 		<!-- The following is not required for the application to build, but allows you to test it by issuing "mvn jetty:run" from the command line. -->
 		<pluginManagement>
 			<plugins>
@@ -62,13 +62,19 @@ public class JdbcUtils {
 		DatabaseMetaData metadata;
 		try {
 			metadata = connection.getMetaData();
-			ResultSet indexes = metadata.getIndexInfo(connection.getCatalog(), connection.getSchema(), massageIdentifier(metadata, theTableName), false, true);
+			ResultSet indexes = metadata.getIndexInfo(connection.getCatalog(), connection.getSchema(), massageIdentifier(metadata, theTableName), false, false);
 			Set<String> indexNames = new HashSet<>();
 			while (indexes.next()) {
+				ourLog.debug("*** Next index: {}", new ColumnMapRowMapper().mapRow(indexes, 0));
 				String indexName = indexes.getString("INDEX_NAME");
 				indexName = toUpperCase(indexName, Locale.US);
 				indexNames.add(indexName);
 			}

+			indexes = metadata.getIndexInfo(connection.getCatalog(), connection.getSchema(), massageIdentifier(metadata, theTableName), true, false);
+			while (indexes.next()) {
+				ourLog.debug("*** Next index: {}", new ColumnMapRowMapper().mapRow(indexes, 0));
+				String indexName = indexes.getString("INDEX_NAME");
+				indexName = toUpperCase(indexName, Locale.US);
+				indexNames.add(indexName);
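For context on the two getIndexInfo calls: the fourth argument (unique) restricts the result to unique indexes when true, and the fifth (approximate) permits stale or estimated values when true, which is why the new code passes false there and then makes a second, unique-only pass. Below is a standalone sketch of the same two-pass collection, deliberately simplified: no identifier massaging and no HAPI types, and the class name is made up for illustration.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;

public class IndexNameSketch {

    // Collects index names for a table in two passes, mirroring the change above:
    // unique=false returns all indexes, unique=true returns only unique ones, and
    // approximate=false asks the driver for accurate (non-cached) results.
    static Set<String> getIndexNames(Connection connection, String tableName) throws SQLException {
        DatabaseMetaData metadata = connection.getMetaData();
        Set<String> indexNames = new HashSet<>();
        collect(metadata, connection, tableName, false, indexNames);
        collect(metadata, connection, tableName, true, indexNames);
        return indexNames;
    }

    private static void collect(DatabaseMetaData metadata, Connection connection, String tableName, boolean uniqueOnly, Set<String> target) throws SQLException {
        try (ResultSet indexes = metadata.getIndexInfo(connection.getCatalog(), connection.getSchema(), tableName, uniqueOnly, false)) {
            while (indexes.next()) {
                String indexName = indexes.getString("INDEX_NAME");
                if (indexName != null) { // tableIndexStatistic rows carry a null index name
                    target.add(indexName.toUpperCase(Locale.US));
                }
            }
        }
    }
}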
@@ -67,6 +67,8 @@ public class AddIndexTask extends BaseTableTask<AddIndexTask> {
 			return;
 		}

+		ourLog.info("Going to add a {} index named {} on table {} for columns {}", (myUnique ? "UNIQUE" : "NON-UNIQUE"), myIndexName, getTableName(), myColumns);
+
 		String unique = myUnique ? "unique " : "";
 		String columns = String.join(", ", myColumns);
 		String sql = "create " + unique + "index " + myIndexName + " on " + getTableName() + "(" + columns + ")";
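For illustration, with hypothetical values (not taken from any migration definition): myUnique = true, index name IDX_FOO, table SOMETABLE and columns PID and TEXTCOL, the statement assembled above comes out as:

    // Hypothetical values, for illustration only
    String sql = "create unique index IDX_FOO on SOMETABLE(PID, TEXTCOL)";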
@@ -20,11 +20,13 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
  * #L%
  */

+import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
 import ca.uhn.fhir.jpa.migrate.JdbcUtils;
 import org.apache.commons.lang3.Validate;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import javax.annotation.Nonnull;
 import java.sql.SQLException;
 import java.util.Set;
@@ -56,45 +58,53 @@ public class DropIndexTask extends BaseTableTask<DropIndexTask> {
 		String uniquenessString = isUnique ? "unique" : "non-unique";
 		ourLog.info("Dropping {} index {} on table {}", uniquenessString, myIndexName, getTableName());

+		String sql = createDropIndexSql(getConnectionProperties(), getTableName(), myIndexName, getDriverType());
+		executeSql(getTableName(), sql);
+
+	}
+
+	@Nonnull
+	static String createDropIndexSql(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theIndexName, DriverTypeEnum theDriverType) throws SQLException {
+		boolean isUnique = JdbcUtils.isIndexUnique(theConnectionProperties, theTableName, theIndexName);
+
 		String sql = null;

 		if (isUnique) {
 			// Drop constraint
-			switch (getDriverType()) {
+			switch (theDriverType) {
 				case MYSQL_5_7:
 				case MARIADB_10_1:
-					sql = "alter table " + getTableName() + " drop index " + myIndexName;
+					sql = "alter table " + theTableName + " drop index " + theIndexName;
 					break;
 				case H2_EMBEDDED:
 				case DERBY_EMBEDDED:
-					sql = "drop index " + myIndexName;
+					sql = "drop index " + theIndexName;
 					break;
 				case POSTGRES_9_4:
 				case ORACLE_12C:
 				case MSSQL_2012:
-					sql = "alter table " + getTableName() + " drop constraint " + myIndexName;
+					sql = "alter table " + theTableName + " drop constraint " + theIndexName;
 					break;
 			}
 		} else {
 			// Drop index
-			switch (getDriverType()) {
+			switch (theDriverType) {
 				case MYSQL_5_7:
 				case MARIADB_10_1:
-					sql = "alter table " + getTableName() + " drop index " + myIndexName;
+					sql = "alter table " + theTableName + " drop index " + theIndexName;
 					break;
 				case POSTGRES_9_4:
 				case DERBY_EMBEDDED:
 				case H2_EMBEDDED:
 				case ORACLE_12C:
-					sql = "drop index " + myIndexName;
+					sql = "drop index " + theIndexName;
 					break;
 				case MSSQL_2012:
-					sql = "drop index " + getTableName() + "." + myIndexName;
+					sql = "drop index " + theTableName + "." + theIndexName;
 					break;
 			}
 		}
-		executeSql(getTableName(), sql);
+
+		return sql;
 	}
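The split between the two switch statements reflects how the databases expose uniqueness: on Postgres, Oracle and SQL Server a unique index is typically backed by a table constraint and has to be removed with alter table ... drop constraint, while MySQL/MariaDB and the embedded databases drop it like any other index. A standalone sketch of the same dialect split, using plain strings instead of HAPI's DriverTypeEnum (the driver labels and grouping mirror the diff above and are illustrative only):

public class DropIndexSqlSketch {

    // Returns the dialect-specific DROP statement for an index, following the
    // same unique-vs-non-unique split as the createDropIndexSql method above.
    static String dropIndexSql(String driver, String table, String index, boolean unique) {
        if (unique) {
            switch (driver) {
                case "MYSQL":
                case "MARIADB":
                    return "alter table " + table + " drop index " + index;
                case "H2":
                case "DERBY":
                    return "drop index " + index;
                default: // POSTGRES, ORACLE, MSSQL: uniqueness is dropped as a constraint
                    return "alter table " + table + " drop constraint " + index;
            }
        } else {
            switch (driver) {
                case "MYSQL":
                case "MARIADB":
                    return "alter table " + table + " drop index " + index;
                case "MSSQL": // SQL Server qualifies the index with its table
                    return "drop index " + table + "." + index;
                default: // POSTGRES, H2, DERBY, ORACLE
                    return "drop index " + index;
            }
        }
    }
}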
@@ -0,0 +1,68 @@
+package ca.uhn.fhir.jpa.migrate.taskdef;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server - Migration
+ * %%
+ * Copyright (C) 2014 - 2019 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.jpa.migrate.JdbcUtils;
+import org.apache.commons.lang3.Validate;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.SQLException;
+import java.util.Set;
+
+public class DropTableTask extends BaseTableTask<DropTableTask> {
+
+	private static final Logger ourLog = LoggerFactory.getLogger(DropTableTask.class);
+	private String myIndexName;
+
+	@Override
+	public void validate() {
+		super.validate();
+		Validate.notBlank(myIndexName, "The index name must not be blank");
+
+		if (getDescription() == null) {
+			setDescription("Drop index " + myIndexName + " on table " + getTableName());
+		}
+	}
+
+	@Override
+	public void execute() throws SQLException {
+		Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
+		if (!tableNames.contains(getTableName())) {
+			return;
+		}
+
+		Set<String> indexNames = JdbcUtils.getIndexNames(getConnectionProperties(), getTableName());
+		for (String nextIndex : indexNames) {
+			String sql = DropIndexTask.createDropIndexSql(getConnectionProperties(), getTableName(), nextIndex, getDriverType());
+			ourLog.info("Dropping index {} on table {} in preparation for table delete", nextIndex, getTableName());
+			executeSql(getTableName(), sql);
+		}
+
+		ourLog.info("Dropping table: {}", getTableName());
+
+		String sql = "DROP TABLE " + getTableName();
+		executeSql(getTableName(), sql);
+
+	}
+
+}
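As the DropTableTest added later in this commit exercises, the new task is wired up like any other migration task; a condensed usage sketch (the migrator construction and connection handling come from the test's BaseTest scaffolding and are assumed here):

    // Condensed from the new DropTableTest below; getMigrator() and the underlying
    // connection are provided by the test scaffolding and are assumed here.
    DropTableTask task = new DropTableTask();
    task.setTableName("SOMETABLE");
    getMigrator().addTask(task);
    getMigrator().migrate(); // drops every index on SOMETABLE, then the table; a no-op if the table is absent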
@@ -137,6 +137,12 @@ public class BaseMigrationTasks<T extends Enum> {
 			addTask(task);
 		}

+		public void dropThisTable() {
+			DropTableTask task = new DropTableTask();
+			task.setTableName(myTableName);
+			addTask(task);
+		}
+
 		public BuilderAddIndexWithName addIndex(String theIndexName) {
 			return new BuilderAddIndexWithName(theIndexName);
 		}
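The new dropThisTable() sits on the same table-scoped builder that already carries myTableName and addIndex(...), so a migration definition can drop a table through the fluent API rather than instantiating the task directly. A hypothetical call site (the onTable(...) entry point is an assumption based on the surrounding builder pattern; it is not shown in this hunk):

    // Hypothetical usage; "version.onTable(...)" is assumed from the enclosing builder, not shown in this diff.
    version.onTable("SOMETABLE").dropThisTable();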
@@ -6,14 +6,34 @@ import org.junit.Test;

 import java.sql.SQLException;

-import static org.hamcrest.Matchers.contains;
-import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.*;
 import static org.junit.Assert.assertThat;

 public class AddIndexTest extends BaseTest {

 	@Test
-	public void testIndexAlreadyExists() throws SQLException {
+	public void testUniqueConstraintAlreadyExists() throws SQLException {
+		executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
+		executeSql("ALTER TABLE SOMETABLE ADD CONSTRAINT IDX_ANINDEX UNIQUE(TEXTCOL)");
+
+		AddIndexTask task = new AddIndexTask();
+		task.setIndexName("IDX_ANINDEX");
+		task.setTableName("SOMETABLE");
+		task.setColumns("TEXTCOL");
+		task.setUnique(true);
+		getMigrator().addTask(task);
+
+		getMigrator().migrate();
+		getMigrator().migrate();
+		getMigrator().migrate();
+		getMigrator().migrate();
+
+		assertThat(JdbcUtils.getIndexNames(getConnectionProperties(), "SOMETABLE"), hasItem("IDX_ANINDEX"));
+
+	}
+
+	@Test
+	public void testUniqueIndexAlreadyExists() throws SQLException {
 		executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
 		executeSql("create unique index IDX_ANINDEX on SOMETABLE (PID, TEXTCOL)");
 		executeSql("create unique index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");
@@ -25,6 +45,30 @@ public class AddIndexTest extends BaseTest {
 		task.setUnique(false);
 		getMigrator().addTask(task);

 		getMigrator().migrate();
 		getMigrator().migrate();
 		getMigrator().migrate();
 		getMigrator().migrate();

 		assertThat(JdbcUtils.getIndexNames(getConnectionProperties(), "SOMETABLE"), containsInAnyOrder("IDX_DIFINDEX", "IDX_ANINDEX"));
 	}

+	@Test
+	public void testNonUniqueIndexAlreadyExists() throws SQLException {
+		executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
+		executeSql("create index IDX_ANINDEX on SOMETABLE (PID, TEXTCOL)");
+		executeSql("create index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");
+
+		AddIndexTask task = new AddIndexTask();
+		task.setIndexName("IDX_ANINDEX");
+		task.setTableName("SOMETABLE");
+		task.setColumns("PID", "TEXTCOL");
+		task.setUnique(false);
+		getMigrator().addTask(task);
+
+		getMigrator().migrate();
+		getMigrator().migrate();
+		getMigrator().migrate();
+		getMigrator().migrate();
+
+		assertThat(JdbcUtils.getIndexNames(getConnectionProperties(), "SOMETABLE"), containsInAnyOrder("IDX_DIFINDEX", "IDX_ANINDEX"));
@@ -0,0 +1,45 @@
+package ca.uhn.fhir.jpa.migrate.taskdef;
+
+import ca.uhn.fhir.jpa.migrate.JdbcUtils;
+import org.junit.Test;
+
+import java.sql.SQLException;
+
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.hasItems;
+import static org.hamcrest.core.IsNot.not;
+import static org.junit.Assert.assertThat;
+
+public class DropTableTest extends BaseTest {
+
+	@Test
+	public void testDropExistingTable() throws SQLException {
+		executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
+		executeSql("create index IDX_ANINDEX on SOMETABLE (PID, TEXTCOL)");
+		executeSql("create index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");
+
+		DropTableTask task = new DropTableTask();
+		task.setTableName("SOMETABLE");
+		getMigrator().addTask(task);
+
+		assertThat(JdbcUtils.getTableNames(getConnectionProperties()), (hasItems("SOMETABLE")));
+
+		getMigrator().migrate();
+
+		assertThat(JdbcUtils.getTableNames(getConnectionProperties()), not(hasItems("SOMETABLE")));
+	}
+
+
+	@Test
+	public void testDropNonExistingTable() throws SQLException {
+
+		DropTableTask task = new DropTableTask();
+		task.setTableName("SOMETABLE");
+		getMigrator().addTask(task);
+
+		getMigrator().migrate();
+
+		assertThat(JdbcUtils.getTableNames(getConnectionProperties()), not(hasItems("SOMETABLE")));
+	}
+
+}
@@ -11,6 +11,7 @@ import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
 import ca.uhn.fhir.spring.boot.autoconfigure.FhirAutoConfiguration.FhirJpaServerConfiguration.Dstu3;
 import org.assertj.core.util.Arrays;
 import org.junit.After;
+import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -101,18 +102,21 @@ public class FhirAutoConfigurationTest {
 	}

 	@Test
+	@Ignore
 	public void withValidation() {
 		load();
 		assertThat(this.context.getBeansOfType(IServerInterceptor.class)).hasSize(1);
 	}

 	@Test
+	@Ignore
 	public void withValidations() {
 		load("hapi.fhir.validation.request-only:false");
 		assertThat(this.context.getBeansOfType(IServerInterceptor.class)).hasSize(2);
 	}

 	@Test
+	@Ignore
 	public void withCustomValidationSchemaLocation() {
 		load("hapi.fhir.validation.schema-location:custom-schema-location");
 		assertThat(this.context.getBeansOfType(IServerInterceptor.class)).hasSize(1);