Work on migrator

This commit is contained in:
James Agnew 2018-08-27 08:21:20 -04:00
parent 1ff8f45a3e
commit 07cfed14b8
22 changed files with 842 additions and 37 deletions

View File

@ -0,0 +1,8 @@
package ca.uhn.fhir.util;
/**
 * Enumerates released HAPI FHIR versions. Used by the schema migrator to key
 * database-change tasks to the version that introduced them; a matching
 * constant must exist for the current build version (see VersionEnumTest).
 */
public enum VersionEnum {
V3_4_0,
V3_5_0
}

View File

@ -0,0 +1,28 @@
package ca.uhn.fhir.util;
import org.junit.Test;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.junit.Assert.assertThat;
/**
 * Sanity check that the version reported by {@link VersionUtil} has a
 * corresponding constant declared in {@link VersionEnum}.
 */
public class VersionEnumTest {

	@Test
	public void testCurrentVersionExists() {
		// Translate e.g. "3.5.0-SNAPSHOT" into the enum naming scheme "V3_5_0"
		String expectedName = "V" + VersionUtil.getVersion().replace(".", "_").replace("-SNAPSHOT", "");

		List<String> declaredNames = Arrays.stream(VersionEnum.values())
			.map(Enum::name)
			.collect(Collectors.toList());

		assertThat(declaredNames, hasItem(expectedName));
	}

}

View File

@ -236,7 +236,12 @@
<artifactId>commons-dbcp2</artifactId>
<scope>test</scope>
</dependency>
<!--
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<!--
<dependency>
<groupId>org.apache.tomcat</groupId>
<artifactId>tomcat-catalina</artifactId>

View File

@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -19,23 +19,34 @@ package ca.uhn.fhir.jpa.entity;
* limitations under the License.
* #L%
*/
import java.util.Date;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import java.util.Date;
/**
 * Common interface for JPA entities that represent a stored FHIR resource,
 * exposing the shared metadata columns (id, version, timestamps, tags).
 */
public interface IBaseResourceEntity {
	/** @return the deletion timestamp, or null if the resource is not deleted */
	Date getDeleted();
	/** @return the FHIR version this resource was stored as */
	FhirVersionEnum getFhirVersion();
	/** @return the entity primary key */
	Long getId();
	/** @return the resource id as an IdDt (may include type/version parts) */
	IdDt getIdDt();
	/** @return the instant the resource was first created */
	InstantDt getPublished();
	/** @return the PID of the resource this entity belongs to */
	Long getResourceId();
	/** @return the FHIR resource type name, e.g. "Patient" */
	String getResourceType();
	/** @return the instant the resource was last updated */
	InstantDt getUpdated();
	/** @return the last-updated timestamp as a plain java.util.Date */
	Date getUpdatedDate();
	/** @return the FHIR version-id (incremented on each update) */
	long getVersion();
	/** @return true if any tags/profiles/security labels are present */
	boolean isHasTags();
}

View File

@ -32,7 +32,7 @@ import javax.persistence.*;
@Embeddable
@Entity
@Table(name = "HFJ_SPIDX_COORDS", indexes = {
@Index(name = "IDX_SP_COORDS", columnList = "RES_TYPE,SP_NAME,SP_LATITUDE,SP_LONGITUDE"),
@Index(name = "IDX_SP_COORDS_HASH", columnList = "HASH_IDENTITY,SP_VALUE,SP_LATITUDE,SP_LONGITUDE"),
@Index(name = "IDX_SP_COORDS_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_COORDS_RESID", columnList = "RES_ID")
})

View File

@ -44,7 +44,6 @@ import java.math.BigDecimal;
@Index(name = "IDX_SP_QUANTITY_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_QUANTITY_RESID", columnList = "RES_ID")
})
//@formatter:on
public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearchParam {
private static final int MAX_LENGTH = 200;

View File

@ -1,17 +1,11 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<!--
Note: HAPI projects use the "hapi-fhir" POM as their base to provide easy management.
You do not need to use this in your own projects, so the "parent" tag and its
contents below may be removed
if you are using this file as a basis for your own project.
-->
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<artifactId>hapi-deployable-pom</artifactId>
<version>3.5.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
<artifactId>hapi-fhir-jpaserver-migrate</artifactId>
@ -33,6 +27,11 @@
</dependency>
-->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
</dependency>
<!-- This dependency includes the core HAPI-FHIR classes -->
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
@ -40,17 +39,33 @@
<version>${project.version}</version>
</dependency>
<!-- HAPI-FHIR uses Logback for logging support. The logback library is included automatically by Maven as a part of the hapi-fhir-base dependency, but you also need to include a logging library. Logback
is used here, but log4j would also be fine. -->
<!-- Test Database -->
<dependency>
<groupId>org.apache.derby</groupId>
<artifactId>derby</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-dbcp2</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId>
</dependency>
<groupId>com.intellij</groupId>
<artifactId>annotations</artifactId>
<version>12.0</version>
</dependency>
</dependencies>
@ -81,8 +96,8 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>1.7</source>
<target>1.7</target>
<source>8</source>
<target>8</target>
</configuration>
</plugin>

View File

@ -1,16 +0,0 @@
package ca.uhn.fhir.jpa.migrate;
import org.flywaydb.core.Flyway;
import org.flywaydb.core.api.Location;
import org.flywaydb.core.api.configuration.Configuration;
import org.flywaydb.core.api.configuration.FluentConfiguration;
/**
 * NOTE(review): early Flyway-based prototype of the migrator (removed in this
 * commit in favor of the task-based ca.uhn.fhir.jpa.migrate.Migrator). The
 * Flyway instance is constructed but migrate() is never invoked, so running
 * main() performs no database changes.
 */
public class Migrator {
	public static void main(String[] theArgs) {
		Configuration config = new FluentConfiguration()
			.locations(new Location());
		// Instance is created but never used - no migration is executed
		new Flyway(config);
	}
}

View File

@ -0,0 +1,94 @@
package ca.uhn.fhir.jpa.migrate;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.jdbc.datasource.SingleConnectionDataSource;
import org.springframework.transaction.support.TransactionTemplate;
/*-
* #%L
* Smile CDR - CDR
* %%
* Copyright (C) 2016 - 2018 Simpatico Intelligent Systems Inc
* %%
* All rights reserved.
* #L%
*/
/**
 * Enumerates the database platforms supported by the migrator. Each constant
 * carries its JDBC driver class name and a flag marking the embedded Derby
 * database.
 */
public enum DriverTypeEnum {

	DERBY_EMBEDDED("org.apache.derby.jdbc.EmbeddedDriver", true),
	MARIADB_10_1("org.mariadb.jdbc.Driver", false),
	// Formerly com.mysql.jdbc.Driver
	MYSQL_5_7("com.mysql.cj.jdbc.Driver", false),
	POSTGRES_9_4("org.postgresql.Driver", false),
	ORACLE_12C("oracle.jdbc.OracleDriver", false),
	// FIX: was com.microsoft.sqlserver.jdbc.SQLServerDataSource, which is a
	// DataSource implementation and not a java.sql.Driver, so it cannot be
	// used as a driver class name. The driver class is SQLServerDriver.
	MSSQL_2012("com.microsoft.sqlserver.jdbc.SQLServerDriver", false),

	;

	private String myDriverClassName;
	private boolean myDerby;

	/**
	 * Constructor
	 *
	 * @param theDriverClassName fully-qualified java.sql.Driver implementation
	 * @param theDerby true only for the embedded Derby driver
	 */
	DriverTypeEnum(String theDriverClassName, boolean theDerby) {
		myDriverClassName = theDriverClassName;
		myDerby = theDerby;
	}

	/**
	 * Open a single connection (auto-commit disabled) to the given URL and
	 * wrap it with a REQUIRES_NEW transaction template.
	 *
	 * @return a {@link ConnectionProperties} bundle; callers must invoke
	 *         {@link ConnectionProperties#close()} when finished
	 */
	public ConnectionProperties newJdbcTemplate(String theUrl, String theUsername, String thePassword) {
		SingleConnectionDataSource dataSource = new SingleConnectionDataSource();
		dataSource.setAutoCommit(false);
		dataSource.setDriverClassName(myDriverClassName);
		dataSource.setUrl(theUrl);
		dataSource.setUsername(theUsername);
		dataSource.setPassword(thePassword);

		DataSourceTransactionManager transactionManager = new DataSourceTransactionManager();
		transactionManager.setDataSource(dataSource);
		transactionManager.afterPropertiesSet();

		TransactionTemplate txTemplate = new TransactionTemplate();
		txTemplate.setTransactionManager(transactionManager);
		txTemplate.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRES_NEW);
		txTemplate.afterPropertiesSet();

		return new ConnectionProperties(dataSource, txTemplate);
	}

	/**
	 * Bundle of a single-connection DataSource plus the transaction template
	 * driving it. The one underlying connection is shared by everything built
	 * from this object; call {@link #close()} to destroy it.
	 */
	public static class ConnectionProperties {
		private final SingleConnectionDataSource myDataSource;
		private final TransactionTemplate myTxTemplate;

		public ConnectionProperties(SingleConnectionDataSource theDataSource, TransactionTemplate theTxTemplate) {
			myDataSource = theDataSource;
			myTxTemplate = theTxTemplate;
		}

		public SingleConnectionDataSource getDataSource() {
			return myDataSource;
		}

		/** @return a new JdbcTemplate bound to the shared single connection */
		public JdbcTemplate newJdbcTemplate() {
			JdbcTemplate jdbcTemplate = new JdbcTemplate();
			jdbcTemplate.setDataSource(myDataSource);
			return jdbcTemplate;
		}

		public TransactionTemplate getTxTemplate() {
			return myTxTemplate;
		}

		/** Destroy the underlying single connection. */
		public void close() {
			myDataSource.destroy();
		}
	}
}

View File

@ -0,0 +1,40 @@
package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
public class JdbcUtils {

	/**
	 * Retrieve the names of all indexes declared on the given table.
	 *
	 * @param theConnectionProperties the active connection bundle (its single
	 *                                connection is reused, not closed here)
	 * @param theTableName            the table to inspect - case must match how
	 *                                the database stores it (TODO confirm
	 *                                case-sensitivity per driver)
	 * @return the set of index names reported by the driver metadata
	 * @throws SQLException if a connection cannot be obtained
	 */
	public static Set<String> getIndexNames(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName) throws SQLException {
		DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
		// NOTE(review): deliberately not closed - this is the shared connection
		// of a SingleConnectionDataSource; closing it would break the caller's
		// transaction template.
		Connection connection = dataSource.getConnection();
		return theConnectionProperties.getTxTemplate().execute(t -> {
			try {
				DatabaseMetaData metadata = connection.getMetaData();
				Set<String> indexNames = new HashSet<>();
				// FIX: the ResultSet was previously never closed (leak)
				try (ResultSet indexes = metadata.getIndexInfo(null, null, theTableName, false, false)) {
					while (indexes.next()) {
						indexNames.add(indexes.getString("INDEX_NAME"));
					}
				}
				return indexNames;
			} catch (SQLException e) {
				throw new InternalErrorException(e);
			}
		});
	}

}

View File

@ -0,0 +1,69 @@
package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
/**
 * Executes an ordered list of {@link BaseTask} schema migrations against a
 * single database connection described by the configured driver type, URL,
 * and credentials.
 */
public class Migrator {

	private static final Logger ourLog = LoggerFactory.getLogger(Migrator.class);

	private DriverTypeEnum myDriverType;
	private String myConnectionUrl;
	private String myUsername;
	private String myPassword;
	private List<BaseTask> myTasks = new ArrayList<>();
	private DriverTypeEnum.ConnectionProperties myConnectionProperties;
	private int myChangesCount;

	/** @return total number of rows changed across all executed tasks */
	public int getChangesCount() {
		return myChangesCount;
	}

	public void setDriverType(DriverTypeEnum theDriverType) {
		myDriverType = theDriverType;
	}

	public void setConnectionUrl(String theConnectionUrl) {
		myConnectionUrl = theConnectionUrl;
	}

	public void setUsername(String theUsername) {
		myUsername = theUsername;
	}

	public void setPassword(String thePassword) {
		myPassword = thePassword;
	}

	public void addTask(BaseTask theTask) {
		myTasks.add(theTask);
	}

	/**
	 * Run all registered tasks in order, closing the connection when done.
	 *
	 * @throws InternalErrorException wrapping any SQLException from a task
	 */
	public void migrate() {
		ourLog.info("Starting migration with {} tasks", myTasks.size());

		// FIX: previously hard-coded DriverTypeEnum.DERBY_EMBEDDED here,
		// silently ignoring the driver configured via setDriverType()
		myConnectionProperties = myDriverType.newJdbcTemplate(myConnectionUrl, myUsername, myPassword);
		try {
			for (BaseTask next : myTasks) {
				next.setDriverType(myDriverType);
				next.setConnectionProperties(myConnectionProperties);
				try {
					next.execute();
				} catch (SQLException e) {
					throw new InternalErrorException("Failure executing task \"" + next.getDescription() + "\", aborting! Cause: " + e.toString(), e);
				}
				myChangesCount += next.getChangesCount();
			}
		} finally {
			myConnectionProperties.close();
		}

		ourLog.info("Finished migration of {} tasks", myTasks.size());
	}
}

View File

@ -0,0 +1,62 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
/**
 * Migration task that creates an index on a table, skipping the work if an
 * index with the same name already exists.
 */
public class AddIndexTask extends BaseTask<AddIndexTask> {

	private static final Logger ourLog = LoggerFactory.getLogger(AddIndexTask.class);

	private String myTableName;
	private String myIndexName;
	private List<String> myColumns;
	// Boxed so validate() can detect that the caller never set it
	private Boolean myUnique;

	public void setTableName(String theTableName) {
		myTableName = theTableName;
	}

	public void setIndexName(String theIndexName) {
		myIndexName = theIndexName;
	}

	public void setColumns(List<String> theColumns) {
		myColumns = theColumns;
	}

	/** Convenience varargs form of {@link #setColumns(List)}. */
	public void setColumns(String... theColumns) {
		setColumns(Arrays.asList(theColumns));
	}

	public void setUnique(boolean theUnique) {
		myUnique = theUnique;
	}

	@Override
	public void validate() {
		Validate.notBlank(myIndexName, "Index name not specified");
		Validate.notBlank(myTableName, "Table name not specified");
		Validate.isTrue(myColumns.size() > 0, "Columns not specified");
		Validate.notNull(myUnique, "Uniqueness not specified");
	}

	@Override
	public void execute() throws SQLException {
		Set<String> indexNames = JdbcUtils.getIndexNames(getConnectionProperties(), myTableName);
		if (indexNames.contains(myIndexName)) {
			ourLog.info("Index {} already exists on table {} - No action performed", myIndexName, myTableName);
			return;
		}

		String unique = myUnique ? "UNIQUE " : "";
		String columns = String.join(", ", myColumns);
		// FIX: previously "CREATE " + unique + " INDEX" emitted a doubled
		// space ("CREATE UNIQUE  INDEX" / "CREATE  INDEX") in the SQL
		String sql = "CREATE " + unique + "INDEX " + myIndexName + " ON " + myTableName + "(" + columns + ")";
		executeSql(sql);
	}
}

View File

@ -0,0 +1,72 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.support.TransactionTemplate;
import java.sql.SQLException;
/**
 * Base class for one schema-migration step. Subclasses implement
 * {@link #validate()} (precondition checks) and {@link #execute()} (the
 * actual DDL/DML work), typically via {@link #executeSql}.
 *
 * @param <T> the concrete subclass type, used for fluent setters
 */
public abstract class BaseTask<T extends BaseTask> {

	private static final Logger ourLog = LoggerFactory.getLogger(BaseTask.class);

	private DriverTypeEnum.ConnectionProperties myConnectionProperties;
	private DriverTypeEnum myDriverType;
	private String myDescription;
	private int myChangesCount;

	/** @return human-readable description used in log/error messages */
	public String getDescription() {
		return myDescription;
	}

	@SuppressWarnings("unchecked")
	public T setDescription(String theDescription) {
		myDescription = theDescription;
		return (T) this;
	}

	/** @return number of rows affected by SQL run through this task so far */
	public int getChangesCount() {
		return myChangesCount;
	}

	/**
	 * Execute the given SQL in a new transaction and accumulate the affected
	 * row count into {@link #getChangesCount()}.
	 */
	public void executeSql(@Language("SQL") String theSql, Object... theArguments) {
		Integer changes = getConnectionProperties().getTxTemplate().execute(t -> {
			JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
			int changesCount = jdbcTemplate.update(theSql, theArguments);
			ourLog.info("SQL {} returned {}", theSql, changesCount);
			return changesCount;
		});

		myChangesCount += changes;
	}

	public DriverTypeEnum.ConnectionProperties getConnectionProperties() {
		return myConnectionProperties;
	}

	public void setConnectionProperties(DriverTypeEnum.ConnectionProperties theConnectionProperties) {
		myConnectionProperties = theConnectionProperties;
	}

	public DriverTypeEnum getDriverType() {
		return myDriverType;
	}

	public void setDriverType(DriverTypeEnum theDriverType) {
		myDriverType = theDriverType;
	}

	public TransactionTemplate getTxTemplate() {
		return getConnectionProperties().getTxTemplate();
	}

	/** @return a new JdbcTemplate bound to the task's connection */
	public JdbcTemplate newJdbcTemplate() {
		return getConnectionProperties().newJdbcTemplate();
	}

	/**
	 * @deprecated misspelled name kept for backward compatibility; use
	 *             {@link #newJdbcTemplate()} instead
	 */
	@Deprecated
	public JdbcTemplate newJdbcTemnplate() {
		return newJdbcTemplate();
	}

	/** Check that the task is fully configured; throws on missing settings. */
	public abstract void validate();

	/** Perform the migration step. */
	public abstract void execute() throws SQLException;
}

View File

@ -0,0 +1,57 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.ResultSetExtractor;
import org.springframework.jdbc.core.RowMapper;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Function;
/**
 * Migration task intended to back-fill a hash column on an existing table.
 *
 * NOTE(review): this class appears to be a work in progress in this commit:
 * the rows loaded in execute() are never processed, addCalculator() has an
 * empty body, and myColumnMappers is never initialized or read. Verify
 * before relying on it.
 */
public class CalculateHashesTask extends BaseTask {
// Table whose rows need hashes computed
private String myTableName;
// Hash column that is NULL for rows still needing back-fill
private String myColumnName;
public void setTableName(String theTableName) {
myTableName = theTableName;
}
public void setColumnName(String theColumnName) {
myColumnName = theColumnName;
}
@Override
public void validate() {
Validate.notBlank(myTableName);
Validate.notBlank(myColumnName);
}
@Override
public void execute() {
// Load one batch of rows whose hash column is still NULL.
// NOTE(review): 'rows' is currently unused - no hashes are written back.
List<Map<String, Object>> rows = getTxTemplate().execute(t->{
JdbcTemplate jdbcTemplate = newJdbcTemnplate();
int batchSize = 10000;
// Cap the batch via setMaxRows rather than dialect-specific LIMIT syntax
jdbcTemplate.setMaxRows(batchSize);
String sql = "SELECT * FROM " + myTableName + " WHERE " + myColumnName + " IS NULL";
ourLog.info("Loading up to {} rows in {} with no hashes", batchSize, myTableName);
return jdbcTemplate.queryForList(sql);
});
}
// NOTE(review): never assigned - presumably meant to be populated by
// addCalculator(); confirm intended design
private Map<String, Function<Map<String, Object>, Long>> myColumnMappers;
// NOTE(review): not yet implemented
public void addCalculator(String theColumnName, Function<Map<String, Object>, Long> theConsumer) {
}
private static final Logger ourLog = LoggerFactory.getLogger(CalculateHashesTask.class);
}

View File

@ -0,0 +1,67 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.SQLException;
import java.util.Set;
/**
 * Migration task that drops an index from a table, using the DROP syntax
 * appropriate to the configured database driver. A no-op if the index does
 * not exist.
 */
public class DropIndexTask extends BaseTask<DropIndexTask> {

	private static final Logger ourLog = LoggerFactory.getLogger(DropIndexTask.class);

	private String myIndexName;
	private String myTableName;

	@Override
	public void validate() {
		Validate.notBlank(myIndexName, "The index name must not be blank");
		Validate.notBlank(myTableName, "The table name must not be blank");

		if (getDescription() == null) {
			setDescription("Drop index " + myIndexName + " on table " + myTableName);
		}
	}

	@Override
	public void execute() throws SQLException {
		Set<String> indexNames = JdbcUtils.getIndexNames(getConnectionProperties(), myTableName);
		if (!indexNames.contains(myIndexName)) {
			ourLog.info("Index {} does not exist on table {} - No action needed", myIndexName, myTableName);
			return;
		}

		ourLog.info("Dropping index {} on table {}", myIndexName, myTableName);

		String sql;
		switch (getDriverType()) {
			case MYSQL_5_7:
			case MARIADB_10_1:
				// MySQL-family databases scope indexes to their table
				sql = "ALTER TABLE " + myTableName + " DROP INDEX " + myIndexName;
				break;
			case POSTGRES_9_4:
			case DERBY_EMBEDDED:
			case ORACLE_12C:
				sql = "DROP INDEX " + myIndexName;
				break;
			case MSSQL_2012:
				sql = "DROP INDEX " + myTableName + "." + myIndexName;
				break;
			default:
				// FIX: previously fell through with sql == null, which would
				// NPE inside executeSql() if a new driver type were added
				throw new IllegalStateException("Unknown driver type: " + getDriverType());
		}

		executeSql(sql);
	}

	public DropIndexTask setTableName(String theTableName) {
		myTableName = theTableName;
		return this;
	}

	public DropIndexTask setIndexName(String theIndexName) {
		myIndexName = theIndexName;
		return this;
	}
}

View File

@ -0,0 +1,112 @@
package ca.uhn.fhir.jpa.migrate.tasks;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamCoords;
import ca.uhn.fhir.jpa.migrate.taskdef.AddIndexTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.CalculateHashesTask;
import ca.uhn.fhir.jpa.migrate.taskdef.DropIndexTask;
import ca.uhn.fhir.util.VersionEnum;
import com.google.common.collect.Multimap;
import com.google.common.collect.MultimapBuilder;
@SuppressWarnings("UnstableApiUsage")
/**
 * Registry of all JPA schema-migration tasks, keyed by the {@link VersionEnum}
 * release that introduced them. Tasks are registered (and validated) in the
 * constructor via a small fluent builder.
 */
@SuppressWarnings("UnstableApiUsage")
public class HapiFhirJpaMigrationTasks {
// Multimap: one version may introduce many tasks; insertion order preserved
// per key by the arrayListValues() backing
private Multimap<VersionEnum, BaseTask<?>> myTasks = MultimapBuilder.hashKeys().arrayListValues().build();
/**
* Constructor
*/
public HapiFhirJpaMigrationTasks() {
// Forced ID changes
forVersion(VersionEnum.V3_5_0)
.onTable("HFJ_FORCED_ID")
.dropIndex("IDX_FORCEDID_TYPE_FORCEDID");
forVersion(VersionEnum.V3_5_0)
.onTable("HFJ_FORCED_ID")
.dropIndex("IDX_FORCEDID_TYPE_RESID");
forVersion(VersionEnum.V3_5_0)
.onTable("HFJ_FORCED_ID")
.addIndex("IDX_FORCEDID_TYPE_FID")
.unique(true)
.withColumns("RESOURCE_TYPE", "FORCED_ID");
// Indexes - Coords
forVersion(VersionEnum.V3_5_0)
.onTable("HFJ_SPIDX_COORDS")
.dropIndex("IDX_SP_COORDS_HASH");
forVersion(VersionEnum.V3_5_0)
.onTable("HFJ_SPIDX_COORDS")
.addIndex("IDX_SP_COORDS_HASH")
.unique(false)
.withColumns("HASH_IDENTITY", "SP_VALUE", "SP_LATITUDE", "SP_LONGITUDE");
// NOTE(review): CalculateHashesTask as defined in this commit has no
// calculator(...) method (only an empty addCalculator), so this call
// looks like it cannot compile yet - verify against the final API
forVersion(VersionEnum.V3_5_0)
.addTask(new CalculateHashesTask().calculator(()->{
return ResourceIndexedSearchParamCoords.calculateHashIdentity("resourceType", "paramName");
}));
}
// Entry point of the fluent registration DSL
private Builder forVersion(VersionEnum theVersion) {
return new Builder(theVersion);
}
private class Builder {
private final VersionEnum myVersion;
private String myTableName;
public Builder(VersionEnum theVersion) {
myVersion = theVersion;
}
public BuilderWithTableName onTable(String theTableName) {
myTableName = theTableName;
return new BuilderWithTableName();
}
// Validates each task before registering it, so misconfiguration fails
// at construction time rather than at migration time
private void addTask(BaseTask theTask) {
theTask.validate();
myTasks.put(myVersion, theTask);
}
private class BuilderWithTableName {
private String myIndexName;
void dropIndex(String theIndexName) {
DropIndexTask task = new DropIndexTask();
task.setIndexName(theIndexName);
task.setTableName(myTableName);
addTask(task);
}
public BuilderAddIndexWithName addIndex(String theIndexName) {
myIndexName = theIndexName;
return new BuilderAddIndexWithName();
}
private class BuilderAddIndexWithName {
private boolean myUnique;
public BuilderAddIndexUnique unique(boolean theUnique) {
myUnique = theUnique;
return new BuilderAddIndexUnique();
}
private class BuilderAddIndexUnique {
// Terminal step: builds and registers the AddIndexTask
public void withColumns(String... theColumnNames) {
AddIndexTask task = new AddIndexTask();
task.setTableName(myTableName);
task.setIndexName(myIndexName);
task.setUnique(myUnique);
task.setColumns(theColumnNames);
addTask(task);
}
}
}
}
}
}

View File

@ -0,0 +1,50 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import com.google.common.collect.Lists;
import org.junit.Test;
import java.sql.SQLException;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertThat;
/**
 * Tests for {@link AddIndexTask}: the task must create the index when it is
 * missing and skip silently when it already exists.
 */
public class AddIndexTest extends BaseTest {

	@Test
	public void testIndexAlreadyExists() throws SQLException {
		executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
		executeSql("create unique index IDX_ANINDEX on SOMETABLE (PID, TEXTCOL)");
		executeSql("create unique index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");

		runAddIndexTask();

		assertThat(JdbcUtils.getIndexNames(getConnectionProperties(), "SOMETABLE"), containsInAnyOrder("IDX_DIFINDEX", "IDX_ANINDEX"));
	}

	@Test
	public void testIndexDoesntAlreadyExist() throws SQLException {
		executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
		executeSql("create unique index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");

		runAddIndexTask();

		assertThat(JdbcUtils.getIndexNames(getConnectionProperties(), "SOMETABLE"), containsInAnyOrder("IDX_DIFINDEX", "IDX_ANINDEX"));
	}

	/**
	 * Configure and run an AddIndexTask for IDX_ANINDEX(PID, TEXTCOL).
	 * Extracted because both tests used identical setup (DRY).
	 */
	private void runAddIndexTask() {
		AddIndexTask task = new AddIndexTask();
		task.setIndexName("IDX_ANINDEX");
		task.setTableName("SOMETABLE");
		task.setColumns("PID", "TEXTCOL");
		task.setUnique(false);
		getMigrator().addTask(task);
		getMigrator().migrate();
	}
}

View File

@ -0,0 +1,54 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.Migrator;
import org.intellij.lang.annotations.Language;
import org.junit.After;
import org.junit.Before;
/**
 * Base class for migrator tests. Each test method gets its own in-memory
 * Derby database plus a {@link Migrator} pre-configured to point at it.
 */
public class BaseTest {

	// Counter appended to the database name so every test sees a fresh database
	private static int ourNextDatabaseId = 0;

	private String myUrl;
	private Migrator myMigrator;
	private DriverTypeEnum.ConnectionProperties myConnectionProperties;

	@Before
	public void before() {
		// ";create=true" makes Derby create the in-memory database on first use
		myUrl = "jdbc:derby:memory:database " + (ourNextDatabaseId++) + ";create=true";

		myConnectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newJdbcTemplate(myUrl, "SA", "SA");

		myMigrator = new Migrator();
		myMigrator.setConnectionUrl(myUrl);
		myMigrator.setDriverType(DriverTypeEnum.DERBY_EMBEDDED);
		myMigrator.setUsername("SA");
		myMigrator.setPassword("SA");
	}

	@After
	public void after() {
		myConnectionProperties.close();
	}

	/** Run one SQL statement directly against the test database. */
	protected void executeSql(@Language("SQL") String theSql) {
		myConnectionProperties.getTxTemplate().execute(t -> {
			myConnectionProperties.newJdbcTemplate().execute(theSql);
			return null;
		});
	}

	public String getUrl() {
		return myUrl;
	}

	public DriverTypeEnum.ConnectionProperties getConnectionProperties() {
		return myConnectionProperties;
	}

	public Migrator getMigrator() {
		return myMigrator;
	}
}

View File

@ -0,0 +1,46 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.junit.Test;
import java.sql.SQLException;
import static org.hamcrest.Matchers.contains;
import static org.junit.Assert.assertThat;
/**
 * Tests for {@link DropIndexTask}: the task must drop the index when present
 * and be a harmless no-op when it is not.
 */
public class DropIndexTest extends BaseTest {

	@Test
	public void testIndexAlreadyExists() throws SQLException {
		executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
		executeSql("create unique index IDX_ANINDEX on SOMETABLE (PID, TEXTCOL)");
		executeSql("create unique index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");

		runDropIndexTask();

		assertThat(JdbcUtils.getIndexNames(getConnectionProperties(), "SOMETABLE"), contains("IDX_DIFINDEX"));
	}

	@Test
	public void testIndexDoesntAlreadyExist() throws SQLException {
		executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
		executeSql("create unique index IDX_DIFINDEX on SOMETABLE (TEXTCOL)");

		runDropIndexTask();

		assertThat(JdbcUtils.getIndexNames(getConnectionProperties(), "SOMETABLE"), contains("IDX_DIFINDEX"));
	}

	/**
	 * Configure and run a DropIndexTask for IDX_ANINDEX on SOMETABLE.
	 * Extracted because both tests used identical setup (DRY).
	 */
	private void runDropIndexTask() {
		DropIndexTask task = new DropIndexTask();
		task.setDescription("Drop an index");
		task.setIndexName("IDX_ANINDEX");
		task.setTableName("SOMETABLE");
		getMigrator().addTask(task);
		getMigrator().migrate();
	}
}

View File

@ -0,0 +1,13 @@
package ca.uhn.fhir.jpa.migrate.tasks;
import org.junit.Test;
/**
 * Smoke test: constructing HapiFhirJpaMigrationTasks registers every task
 * through a builder that calls validate() on each one, so this catches
 * misconfigured migration tasks at build time.
 */
public class HapiFhirJpaMigrationTasksTest {
@Test
public void testCreate() {
// Constructor registers and validates all migration tasks
new HapiFhirJpaMigrationTasks();
}
}

View File

@ -0,0 +1,14 @@
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} [%file:%line] - %msg%n
</pattern>
</encoder>
</appender>
<root level="info">
<appender-ref ref="STDOUT" />
</root>
</configuration>

View File

@ -1156,6 +1156,11 @@
<artifactId>spring-core</artifactId>
<version>${spring_version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
<version>${spring_version}</version>
</dependency>
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-jpa</artifactId>