Migrator enhancements and adjust reindexer to account for missing versions
This commit is contained in:
James Agnew 2018-12-04 17:33:29 -05:00
parent c484c69664
commit 8c7f249a21
20 changed files with 631 additions and 139 deletions


@@ -36,6 +36,50 @@ public class HapiMigrateDatabaseCommandTest {
System.setProperty("test", "true");
}
@Test
public void testMigrate_340_370() throws IOException {
File directory = new File("target/migrator_derby_test_340_370");
if (directory.exists()) {
FileUtils.deleteDirectory(directory);
}
String url = "jdbc:derby:directory:" + directory.getAbsolutePath() + ";create=true";
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(url, "", "");
String initSql = "/persistence_create_derby107_340.sql";
executeSqlStatements(connectionProperties, initSql);
seedDatabase340(connectionProperties);
ourLog.info("**********************************************");
ourLog.info("Done Setup, Starting Migration...");
ourLog.info("**********************************************");
String[] args = new String[]{
"migrate-database",
"-d", "DERBY_EMBEDDED",
"-u", url,
"-n", "",
"-p", "",
"-f", "V3_4_0",
"-t", "V3_7_0"
};
App.main(args);
connectionProperties.getTxTemplate().execute(t -> {
JdbcTemplate jdbcTemplate = connectionProperties.newJdbcTemplate();
List<Map<String, Object>> values = jdbcTemplate.queryForList("SELECT * FROM hfj_spidx_token");
assertEquals(1, values.size());
assertEquals("identifier", values.get(0).get("SP_NAME"));
assertEquals("12345678", values.get(0).get("SP_VALUE"));
assertTrue(values.get(0).keySet().contains("HASH_IDENTITY"));
assertEquals(7001889285610424179L, values.get(0).get("HASH_IDENTITY"));
return null;
});
}
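
A note on the HASH_IDENTITY assertion above: the migration backfills a 64-bit hash computed from the resource type and search parameter name. As a rough illustration only (the real computation lives in HAPI's BaseResourceIndexedSearchParam; the seed and separator below are assumptions and will not necessarily reproduce 7001889285610424179), a Murmur3-based sketch might look like:

import com.google.common.hash.HashFunction;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import java.nio.charset.StandardCharsets;

public class HashIdentityDemo {
	// Illustrative only: HAPI's actual seed and field separator may differ
	static long hashIdentity(String theResourceType, String theParamName) {
		HashFunction hashFunction = Hashing.murmur3_128();
		Hasher hasher = hashFunction.newHasher();
		hasher.putString(theResourceType, StandardCharsets.UTF_8);
		hasher.putChar('|'); // separator so ("ab","c") and ("a","bc") hash differently
		hasher.putString(theParamName, StandardCharsets.UTF_8);
		return hasher.hash().asLong();
	}

	public static void main(String[] args) {
		System.out.println(hashIdentity("Patient", "identifier"));
	}
}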
@Test
public void testMigrate_340_350() throws IOException {
@@ -102,49 +146,7 @@ public class HapiMigrateDatabaseCommandTest {
});
}
@Test
public void testMigrate_340_360() throws IOException {
File directory = new File("target/migrator_derby_test_340_360");
if (directory.exists()) {
FileUtils.deleteDirectory(directory);
}
String url = "jdbc:derby:directory:" + directory.getAbsolutePath() + ";create=true";
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.DERBY_EMBEDDED.newConnectionProperties(url, "", "");
String initSql = "/persistence_create_derby107_340.sql";
executeSqlStatements(connectionProperties, initSql);
seedDatabase340(connectionProperties);
ourLog.info("**********************************************");
ourLog.info("Done Setup, Starting Migration...");
ourLog.info("**********************************************");
String[] args = new String[]{
"migrate-database",
"-d", "DERBY_EMBEDDED",
"-u", url,
"-n", "",
"-p", "",
"-f", "V3_4_0",
"-t", "V3_6_0"
};
App.main(args);
connectionProperties.getTxTemplate().execute(t -> {
JdbcTemplate jdbcTemplate = connectionProperties.newJdbcTemplate();
List<Map<String, Object>> values = jdbcTemplate.queryForList("SELECT * FROM hfj_spidx_token");
assertEquals(1, values.size());
assertEquals("identifier", values.get(0).get("SP_NAME"));
assertEquals("12345678", values.get(0).get("SP_VALUE"));
assertTrue(values.get(0).keySet().contains("HASH_IDENTITY"));
assertEquals(7001889285610424179L, values.get(0).get("HASH_IDENTITY"));
return null;
});
}
private void seedDatabase340(DriverTypeEnum.ConnectionProperties theConnectionProperties) {
theConnectionProperties.getTxTemplate().execute(t -> {
JdbcTemplate jdbcTemplate = theConnectionProperties.newJdbcTemplate();


@@ -895,7 +895,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
@SuppressWarnings("unchecked")
private <R extends IBaseResource> R populateResourceMetadataHapi(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IResource res) {
private <R extends IBaseResource> R populateResourceMetadataHapi(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IResource res, Long theVersion) {
R retVal = (R) res;
if (theEntity.getDeleted() != null) {
res = (IResource) myContext.getResourceDefinition(theResourceType).newInstance();
@@ -917,7 +917,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
}
res.setId(theEntity.getIdDt());
res.setId(theEntity.getIdDt().withVersion(theVersion.toString()));
ResourceMetadataKeyEnum.VERSION.put(res, Long.toString(theEntity.getVersion()));
ResourceMetadataKeyEnum.PUBLISHED.put(res, theEntity.getPublished());
@@ -961,7 +961,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
}
@SuppressWarnings("unchecked")
private <R extends IBaseResource> R populateResourceMetadataRi(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IAnyResource res) {
private <R extends IBaseResource> R populateResourceMetadataRi(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IAnyResource res, Long theVersion) {
R retVal = (R) res;
if (theEntity.getDeleted() != null) {
res = (IAnyResource) myContext.getResourceDefinition(theResourceType).newInstance();
@@ -990,6 +990,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
res.getMeta().setVersionId(null);
populateResourceIdFromEntity(theEntity, res);
res.setId(res.getIdElement().withVersion(theVersion.toString()));
res.getMeta().setLastUpdated(theEntity.getUpdatedDate());
IDao.RESOURCE_PID.put(res, theEntity.getId());
@@ -1136,26 +1137,36 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
byte[] resourceBytes = null;
ResourceEncodingEnum resourceEncoding = null;
Collection<? extends BaseTag> myTagList = null;
Long version = null;
if (theEntity instanceof ResourceHistoryTable) {
ResourceHistoryTable history = (ResourceHistoryTable) theEntity;
resourceBytes = history.getResource();
resourceEncoding = history.getEncoding();
myTagList = history.getTags();
version = history.getVersion();
} else if (theEntity instanceof ResourceTable) {
ResourceTable resource = (ResourceTable) theEntity;
ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
if (history == null) {
return null;
version = theEntity.getVersion();
ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), version);
while (history == null) {
if (version > 1L) {
version--;
history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), version);
} else {
return null;
}
}
resourceBytes = history.getResource();
resourceEncoding = history.getEncoding();
myTagList = resource.getTags();
version = history.getVersion();
} else if (theEntity instanceof ResourceSearchView) {
// This is the search View
ResourceSearchView myView = (ResourceSearchView) theEntity;
resourceBytes = myView.getResource();
resourceEncoding = myView.getEncoding();
version = myView.getVersion();
if (theTagList == null)
myTagList = new HashSet<>();
else
@@ -1220,10 +1231,10 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
// 5. fill MetaData
if (retVal instanceof IResource) {
IResource res = (IResource) retVal;
retVal = populateResourceMetadataHapi(resourceType, theEntity, myTagList, theForHistoryOperation, res);
retVal = populateResourceMetadataHapi(resourceType, theEntity, myTagList, theForHistoryOperation, res, version);
} else {
IAnyResource res = (IAnyResource) retVal;
retVal = populateResourceMetadataRi(resourceType, theEntity, myTagList, theForHistoryOperation, res);
retVal = populateResourceMetadataRi(resourceType, theEntity, myTagList, theForHistoryOperation, res, version);
}
return retVal;
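
The while-loop added above walks backwards from the expected version until it finds a history row that still exists. A minimal self-contained sketch of that walk-back strategy, with a Map standing in for the HFJ_RES_VER lookup:

import java.util.HashMap;
import java.util.Map;

public class VersionWalkbackDemo {
	public static void main(String[] args) {
		// Stand-in for the history table: version number -> stored resource
		Map<Long, String> history = new HashMap<>();
		history.put(1L, "v1");
		history.put(2L, "v2"); // version 3 was expunged from the history table

		long version = 3L; // the version the resource table still points at
		String resource = history.get(version);
		while (resource == null && version > 1L) {
			version--; // fall back to the newest surviving version
			resource = history.get(version);
		}
		System.out.println(resource == null ? "no versions left" : "loaded version " + version);
	}
}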


@@ -8,6 +8,7 @@ import javax.persistence.TemporalType;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.jpa.repository.Temporal;
import org.springframework.data.repository.query.Param;
@@ -91,4 +92,8 @@ public interface IResourceHistoryTableDao extends JpaRepository<ResourceHistoryT
"INNER JOIN ResourceTable r ON (r.myId = h.myResourceId and r.myVersion = h.myResourceVersion) " +
"WHERE r.myId in (:pids)")
Collection<ResourceHistoryTable> findByResourceIds(@Param("pids") Collection<Long> pids);
@Modifying
@Query("UPDATE ResourceHistoryTable r SET r.myResourceVersion = :newVersion WHERE r.myResourceId = :id AND r.myResourceVersion = :oldVersion")
void updateVersion(@Param("id") long theId, @Param("oldVersion") long theOldVersion, @Param("newVersion") long theNewVersion);
}
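
The new updateVersion method is a Spring Data JPA bulk update; @Modifying JPQL requires an active transaction and bypasses the persistence context, so already-loaded ResourceHistoryTable entities are not refreshed. A hedged usage sketch (the service wrapper is an assumption, not part of this commit):

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

@Service
public class VersionRenumberService {

	@Autowired
	private IResourceHistoryTableDao myResourceHistoryTableDao;

	@Transactional // @Modifying JPQL requires an active transaction
	public void renumber(long theResourcePid, long theOldVersion, long theNewVersion) {
		// Bulk update: entities already in the persistence context are not
		// refreshed, so don't reuse a stale ResourceHistoryTable afterwards
		myResourceHistoryTableDao.updateVersion(theResourcePid, theOldVersion, theNewVersion);
	}
}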


@@ -27,6 +27,7 @@ import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.DaoRegistry;
import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceReindexJobDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
@@ -87,6 +88,8 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
@Autowired
private IResourceTableDao myResourceTableDao;
@Autowired
private IResourceHistoryTableDao myResourceHistoryTableDao;
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private IForcedIdDao myForcedIdDao;
@@ -456,10 +459,18 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
}
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(resourceTable.getResourceType());
IBaseResource resource = dao.toResource(resourceTable, false);
long expectedVersion = resourceTable.getVersion();
IBaseResource resource = dao.read(resourceTable.getIdDt().toVersionless());
if (resource == null) {
throw new InternalErrorException("Could not find resource version " + resourceTable.getIdDt().toUnqualified().getValue() + " in database");
}
Long actualVersion = resource.getIdElement().getVersionIdPartAsLong();
if (actualVersion < expectedVersion) {
ourLog.warn("Resource {} version {} does not exist, renumbering version {}", resource.getIdElement().toUnqualifiedVersionless().getValue(), resource.getIdElement().getVersionIdPart(), expectedVersion);
myResourceHistoryTableDao.updateVersion(resourceTable.getId(), actualVersion, expectedVersion);
}
doReindex(resourceTable, resource);
return null;


@@ -66,6 +66,7 @@ import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
@@ -135,6 +136,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
private Cache<TranslationQuery, List<TermConceptMapGroupElement>> myTranslationWithReverseCache;
private int myFetchSize = DEFAULT_FETCH_SIZE;
private ApplicationContext myApplicationContext;
private TransactionTemplate myTxTemplate;
/**
* @param theAdd If true, add the code. If false, remove the code.
@@ -366,6 +368,9 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
}
}
@Autowired
private PlatformTransactionManager myTransactionManager;
@Override
@Transactional
public void deleteConceptMapAndChildren(ResourceTable theResourceTable) {
@@ -384,7 +389,12 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
break;
}
theDao.deleteInBatch(link);
TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);
txTemplate.execute(t -> {
theDao.deleteInBatch(link);
return null;
});
count += link.getNumberOfElements();
ourLog.info(" * {} {} deleted - {}/sec - ETA: {}", count, theDescriptor, sw.formatThroughput(count, TimeUnit.SECONDS), sw.getEstimatedTimeRemaining(count, totalCount));
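
Running each deleteInBatch inside its own TransactionTemplate keeps individual transactions small, so deleting a very large concept map neither holds one giant transaction open nor loses all progress on a mid-run failure. A self-contained sketch of the pattern (the per-batch work is a placeholder):

import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.support.TransactionTemplate;

public class BatchedDeleter {

	private final PlatformTransactionManager myTransactionManager;

	public BatchedDeleter(PlatformTransactionManager theTransactionManager) {
		myTransactionManager = theTransactionManager;
	}

	public void deleteInBatches(Runnable theDeleteOneBatch, int theBatchCount) {
		TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
		// PROPAGATION_REQUIRED joins the caller's transaction if one exists,
		// otherwise opens a short transaction per batch
		txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);
		for (int i = 0; i < theBatchCount; i++) {
			txTemplate.execute(t -> {
				theDeleteOneBatch.run(); // e.g. theDao.deleteInBatch(link)
				return null;
			});
		}
	}
}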
@@ -889,7 +899,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
}
TransactionTemplate tt = new TransactionTemplate(myTransactionMgr);
tt.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRES_NEW);
tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
tt.execute(new TransactionCallbackWithoutResult() {
private void createParentsString(StringBuilder theParentsBuilder, Long theConceptPid) {
Validate.notNull(theConceptPid, "theConceptPid must not be null");
@@ -1016,7 +1026,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
}
TransactionTemplate tt = new TransactionTemplate(myTransactionMgr);
tt.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRES_NEW);
tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
if (!myDeferredConcepts.isEmpty() || !myConceptLinksToSaveLater.isEmpty()) {
tt.execute(t -> {
processDeferredConcepts();
@@ -1052,6 +1062,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
@PostConstruct
public void start() {
myCodeSystemResourceDao = myApplicationContext.getBean(IFhirResourceDaoCodeSystem.class);
myTxTemplate = new TransactionTemplate(myTransactionManager);
}
@Override
@@ -1065,8 +1076,6 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
// Grab the existing versions so we can delete them later
List<TermCodeSystemVersion> existing = myCodeSystemVersionDao.findByCodeSystemResource(theCodeSystemResourcePid);
// verifyNoDuplicates(theCodeSystemVersion.getConcepts(), new HashSet<String>());
/*
* For now we always delete old versions.. At some point it would be nice to allow configuration to keep old versions
*/


@@ -67,7 +67,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
public static final String LOINC_ANSWERLIST_LINK_FILE = "LoincAnswerListLink.csv";
public static final String LOINC_DOCUMENT_ONTOLOGY_FILE = "DocumentOntology.csv";
public static final String LOINC_UPLOAD_PROPERTIES_FILE = "loincupload.properties";
public static final String LOINC_FILE = "Loinc.csv";
public static final String LOINC_FILE = "LoincTable/Loinc.csv";
public static final String LOINC_HIERARCHY_FILE = "MultiAxialHierarchy.csv";
public static final String LOINC_PART_FILE = "Part.csv";
public static final String LOINC_PART_LINK_FILE = "LoincPartLink.csv";
@@ -135,6 +135,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
} else {
matches = nextFilename.endsWith("/" + theFileNamePart) || nextFilename.equals(theFileNamePart);
}
if (matches) {
ourLog.info("Processing file {}", nextFilename);
foundMatch = true;
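
The matching rule above accepts either an exact name or a path suffix, which is what lets the loader find Loinc.csv nested under LoincTable/ in the 2.65 release layout. A minimal runnable sketch (the archive paths are hypothetical):

public class LoincFileMatchDemo {

	static boolean matches(String theNextFilename, String theFileNamePart) {
		return theNextFilename.endsWith("/" + theFileNamePart) || theNextFilename.equals(theFileNamePart);
	}

	public static void main(String[] args) {
		System.out.println(matches("LoincTable/Loinc.csv", "LoincTable/Loinc.csv"));            // true: exact match
		System.out.println(matches("Loinc_2.65/LoincTable/Loinc.csv", "LoincTable/Loinc.csv")); // true: path suffix
		System.out.println(matches("Other/Loinc.csv", "LoincTable/Loinc.csv"));                 // false
	}
}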


@@ -11,6 +11,7 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.*;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
@@ -577,6 +578,50 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
}
@Test
public void testReindexingCurrentVersionDeleted() {
Patient p = new Patient();
p.addName().setFamily("family1");
final IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
p = new Patient();
p.setId(id);
p.addName().setFamily("family1");
p.addName().setFamily("family2");
myPatientDao.update(p);
p = new Patient();
p.setId(id);
p.addName().setFamily("family1");
p.addName().setFamily("family2");
p.addName().setFamily("family3");
myPatientDao.update(p);
SearchParameterMap searchParamMap = new SearchParameterMap();
searchParamMap.setLoadSynchronous(true);
searchParamMap.add(Patient.SP_FAMILY, new StringParam("family2"));
assertEquals(1, myPatientDao.search(searchParamMap).size().intValue());
runInTransaction(()->{
ResourceHistoryTable historyEntry = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 3);
assertNotNull(historyEntry);
myResourceHistoryTableDao.delete(historyEntry);
});
Long jobId = myResourceReindexingSvc.markAllResourcesForReindexing();
myResourceReindexingSvc.forceReindexingPass();
searchParamMap = new SearchParameterMap();
searchParamMap.setLoadSynchronous(true);
searchParamMap.add(Patient.SP_FAMILY, new StringParam("family2"));
IBundleProvider search = myPatientDao.search(searchParamMap);
assertEquals(1, search.size().intValue());
p = (Patient) search.getResources(0, 1).get(0);
assertEquals("3", p.getIdElement().getVersionIdPart());
}
@Test
public void testSystemMetaOperation() {


@@ -21,8 +21,11 @@ package ca.uhn.fhir.jpa.migrate;
*/
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTableColumnTypeTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTableTask;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.dialect.internal.StandardDialectResolver;
import org.hibernate.engine.jdbc.dialect.spi.DatabaseMetaDataDialectResolutionInfoAdapter;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolver;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.ColumnMapRowMapper;
@@ -188,9 +191,9 @@ public class JdbcUtils {
}
}
/**
* Retrieve all column names
*/
public static Set<String> getColumnNames(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName) throws SQLException {
DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
try (Connection connection = dataSource.getConnection()) {
@@ -220,6 +223,43 @@ public class JdbcUtils {
}
}
public static Set<String> getSequenceNames(DriverTypeEnum.ConnectionProperties theConnectionProperties) throws SQLException {
DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
try (Connection connection = dataSource.getConnection()) {
return theConnectionProperties.getTxTemplate().execute(t -> {
try {
DialectResolver dialectResolver = new StandardDialectResolver();
Dialect dialect = dialectResolver.resolveDialect(new DatabaseMetaDataDialectResolutionInfoAdapter(connection.getMetaData()));
Set<String> sequenceNames = new HashSet<>();
if (dialect.supportsSequences()) {
String sql = dialect.getQuerySequencesString();
if (sql != null) {
Statement statement = null;
ResultSet rs = null;
try {
statement = connection.createStatement();
rs = statement.executeQuery(sql);
while (rs.next()) {
sequenceNames.add(rs.getString(1).toUpperCase());
}
} finally {
if (rs != null) rs.close();
if (statement != null) statement.close();
}
}
}
return sequenceNames;
} catch (SQLException e) {
throw new InternalErrorException(e);
}
});
}
}
public static Set<String> getTableNames(DriverTypeEnum.ConnectionProperties theConnectionProperties) throws SQLException {
DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource());
try (Connection connection = dataSource.getConnection()) {


@@ -116,4 +116,7 @@ public class Migrator {
}
public void addTasks(List<BaseTask<?>> theTasks) {
theTasks.forEach(this::addTask);
}
}


@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
*/
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -40,11 +41,7 @@ public class AddColumnTask extends BaseTableColumnTypeTask<AddColumnTask> {
return;
}
String type = getSqlType();
String nullable = getSqlNotNull();
if (isNullable()) {
nullable = "";
}
String typeStatement = getTypeStatement();
String sql = "";
switch (getDriverType()) {
@@ -52,16 +49,25 @@ public class AddColumnTask extends BaseTableColumnTypeTask<AddColumnTask> {
case MARIADB_10_1:
case MYSQL_5_7:
case POSTGRES_9_4:
sql = "alter table " + getTableName() + " add column " + getColumnName() + " " + type + " " + nullable;
sql = "alter table " + getTableName() + " add column " + getColumnName() + " " + typeStatement;
break;
case MSSQL_2012:
case ORACLE_12C:
sql = "alter table " + getTableName() + " add " + getColumnName() + " " + type + " " + nullable;
sql = "alter table " + getTableName() + " add " + getColumnName() + " " + typeStatement;
break;
}
ourLog.info("Adding column {} of type {} to table {}", getColumnName(), type, getTableName());
ourLog.info("Adding column {} of type {} to table {}", getColumnName(), getSqlType(), getTableName());
executeSql(getTableName(), sql);
}
public String getTypeStatement() {
String type = getSqlType();
String nullable = getSqlNotNull();
if (isNullable()) {
nullable = "";
}
return type + " " + nullable;
}
}


@@ -0,0 +1,91 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
/*-
* #%L
* HAPI FHIR JPA Server - Migration
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.SQLException;
import java.util.Set;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class AddIdGeneratorTask extends BaseTask<AddIdGeneratorTask> {
private static final Logger ourLog = LoggerFactory.getLogger(AddIdGeneratorTask.class);
private final String myGeneratorName;
public AddIdGeneratorTask(String theGeneratorName) {
myGeneratorName = theGeneratorName;
}
@Override
public void validate() {
Validate.notBlank(myGeneratorName);
}
@Override
public void execute() throws SQLException {
Set<String> tableNames = JdbcUtils.getTableNames(getConnectionProperties());
String sql = null;
switch (getDriverType()) {
case MARIADB_10_1:
case MYSQL_5_7:
// These require a separate table
if (!tableNames.contains(myGeneratorName)) {
String creationSql = "create table " + myGeneratorName + " ( next_val bigint ) engine=InnoDB";
executeSql(myGeneratorName, creationSql);
String initSql = "insert into " + myGeneratorName + " values ( 1 )";
executeSql(myGeneratorName, initSql);
}
break;
case DERBY_EMBEDDED:
sql = "create sequence " + myGeneratorName + " start with 1 increment by 50";
break;
case POSTGRES_9_4:
sql = "create sequence " + myGeneratorName + " start 1 increment 50";
break;
case ORACLE_12C:
sql = "create sequence " + myGeneratorName + " start with 1 increment by 50";
break;
case MSSQL_2012:
sql = "create sequence " + myGeneratorName + " start with 1 increment by 50";
break;
}
if (isNotBlank(sql)) {
if (JdbcUtils.getSequenceNames(getConnectionProperties()).contains(myGeneratorName)) {
ourLog.info("Sequence {} already exists - No action performed", myGeneratorName);
return;
}
executeSql(myGeneratorName, sql);
}
}
}


@@ -0,0 +1,91 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
/*-
* #%L
* HAPI FHIR JPA Server - Migration
* %%
* Copyright (C) 2014 - 2018 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
public class AddTableByColumnTask extends BaseTableTask<AddTableByColumnTask> {
private static final Logger ourLog = LoggerFactory.getLogger(AddTableByColumnTask.class);
private List<AddColumnTask> myAddColumnTasks = new ArrayList<>();
private String myPkColumn;
public void addAddColumnTask(AddColumnTask theTask) {
myAddColumnTasks.add(theTask);
}
public void setPkColumn(String thePkColumn) {
myPkColumn = thePkColumn;
}
@Override
public void execute() throws SQLException {
if (JdbcUtils.getTableNames(getConnectionProperties()).contains(getTableName())) {
ourLog.info("Already have table named {} - No action performed", getTableName());
return;
}
StringBuilder sb = new StringBuilder();
sb.append("CREATE TABLE ");
sb.append(getTableName());
sb.append(" ( ");
for (AddColumnTask next : myAddColumnTasks) {
next.setDriverType(getDriverType());
next.setTableName(getTableName());
next.validate();
sb.append(next.getColumnName());
sb.append(" ");
sb.append(next.getTypeStatement());
sb.append(", ");
}
sb.append(" PRIMARY KEY (");
sb.append(myPkColumn);
sb.append(")");
sb.append(" ) ");
switch (getDriverType()) {
case MARIADB_10_1:
case MYSQL_5_7:
sb.append("engine=InnoDB");
break;
case DERBY_EMBEDDED:
case POSTGRES_9_4:
case ORACLE_12C:
case MSSQL_2012:
break;
}
executeSql(getTableName(), sb.toString());
}
}


@@ -31,9 +31,9 @@ import org.springframework.jdbc.core.JdbcTemplate;
import java.sql.SQLException;
import java.util.*;
public class AddTableTask extends BaseTableTask<AddTableTask> {
public class AddTableRawSqlTask extends BaseTableTask<AddTableRawSqlTask> {
private static final Logger ourLog = LoggerFactory.getLogger(AddTableTask.class);
private static final Logger ourLog = LoggerFactory.getLogger(AddTableRawSqlTask.class);
private Map<DriverTypeEnum, List<String>> myDriverToSqls = new HashMap<>();
public void addSql(DriverTypeEnum theDriverType, @Language("SQL") String theSql) {


@@ -115,8 +115,9 @@ public abstract class BaseTableColumnTypeTask<T extends BaseTableTask> extends B
return myNullable;
}
public void setNullable(boolean theNullable) {
public T setNullable(boolean theNullable) {
myNullable = theNullable;
return (T) this;
}
protected String getSqlNotNull() {
@@ -127,8 +128,9 @@ public abstract class BaseTableColumnTypeTask<T extends BaseTableTask> extends B
return myColumnLength;
}
public void setColumnLength(int theColumnLength) {
public BaseTableColumnTypeTask<T> setColumnLength(int theColumnLength) {
myColumnLength = (long) theColumnLength;
return this;
}


@@ -20,7 +20,6 @@ package ca.uhn.fhir.jpa.migrate.tasks;
* #L%
*/
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.model.entity.*;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.AddColumnTask;
@@ -74,11 +73,11 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
.nullable()
.type(BaseTableColumnTypeTask.ColumnTypeEnum.INT);
version.addTable("HFJ_RES_REINDEX_JOB")
version.addTableRawSql("HFJ_RES_REINDEX_JOB")
.addSql(DriverTypeEnum.MSSQL_2012, "create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED bit not null, RES_TYPE varchar(255), SUSPENDED_UNTIL datetime2, UPDATE_THRESHOLD_HIGH datetime2 not null, UPDATE_THRESHOLD_LOW datetime2, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED boolean not null, RES_TYPE varchar(255), SUSPENDED_UNTIL timestamp, UPDATE_THRESHOLD_HIGH timestamp not null, UPDATE_THRESHOLD_LOW timestamp, primary key (PID))")
.addSql(DriverTypeEnum.MARIADB_10_1, "create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED bit not null, RES_TYPE varchar(255), SUSPENDED_UNTIL datetime(6), UPDATE_THRESHOLD_HIGH datetime(6) not null, UPDATE_THRESHOLD_LOW datetime(6), primary key (PID))")
.addSql(DriverTypeEnum.POSTGRES_9_4, "persistence_create_postgres94.sql:create table HFJ_RES_REINDEX_JOB (PID int8 not null, JOB_DELETED boolean not null, RES_TYPE varchar(255), SUSPENDED_UNTIL timestamp, UPDATE_THRESHOLD_HIGH timestamp not null, UPDATE_THRESHOLD_LOW timestamp, primary key (PID))")
.addSql(DriverTypeEnum.POSTGRES_9_4, "create table HFJ_RES_REINDEX_JOB (PID int8 not null, JOB_DELETED boolean not null, RES_TYPE varchar(255), SUSPENDED_UNTIL timestamp, UPDATE_THRESHOLD_HIGH timestamp not null, UPDATE_THRESHOLD_LOW timestamp, primary key (PID))")
.addSql(DriverTypeEnum.MYSQL_5_7, " create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED bit not null, RES_TYPE varchar(255), SUSPENDED_UNTIL datetime(6), UPDATE_THRESHOLD_HIGH datetime(6) not null, UPDATE_THRESHOLD_LOW datetime(6), primary key (PID))")
.addSql(DriverTypeEnum.ORACLE_12C, "create table HFJ_RES_REINDEX_JOB (PID number(19,0) not null, JOB_DELETED number(1,0) not null, RES_TYPE varchar2(255 char), SUSPENDED_UNTIL timestamp, UPDATE_THRESHOLD_HIGH timestamp not null, UPDATE_THRESHOLD_LOW timestamp, primary key (PID))");
@@ -367,7 +366,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// Concept Designation
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_DESIG");
version
.addTable("TRM_CONCEPT_DESIG")
.addTableRawSql("TRM_CONCEPT_DESIG")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
@@ -390,7 +389,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// Concept Property
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_PROPERTY");
version
.addTable("TRM_CONCEPT_PROPERTY")
.addTableRawSql("TRM_CONCEPT_PROPERTY")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT")
@@ -413,7 +412,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// Concept Map - Map
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP");
version
.addTable("TRM_CONCEPT_MAP")
.addTableRawSql("TRM_CONCEPT_MAP")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE")
.addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))")
@@ -435,7 +434,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// Concept Map - Group
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP_GROUP");
version
.addTable("TRM_CONCEPT_MAP_GROUP")
.addTableRawSql("TRM_CONCEPT_MAP_GROUP")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create unique index IDX_CONCEPT_MAP_URL on TRM_CONCEPT_MAP (URL)")
@@ -453,7 +452,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// Concept Map - Group Element
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP_GRP_ELEMENT");
version
.addTable("TRM_CONCEPT_MAP_GRP_ELEMENT")
.addTableRawSql("TRM_CONCEPT_MAP_GRP_ELEMENT")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP")
.addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))")
@@ -476,7 +475,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// Concept Map - Group Element Target
version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP_GRP_ELM_TGT");
version
.addTable("TRM_CONCEPT_MAP_GRP_ELM_TGT")
.addTableRawSql("TRM_CONCEPT_MAP_GRP_ELM_TGT")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT")
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)")


@@ -61,31 +61,35 @@ public class BaseMigrationTasks<T extends Enum> {
}
protected Builder forVersion(T theVersion) {
return new Builder(theVersion);
IAcceptsTasks sink = theTask -> {
theTask.validate();
myTasks.put(theVersion, theTask);
};
return new Builder(sink);
}
protected class Builder {
public interface IAcceptsTasks {
void addTask(BaseTask<?> theTask);
}
private final T myVersion;
private String myTableName;
protected static class Builder {
Builder(T theVersion) {
myVersion = theVersion;
private final IAcceptsTasks mySink;
public Builder(IAcceptsTasks theSink) {
mySink = theSink;
}
public BuilderWithTableName onTable(String theTableName) {
myTableName = theTableName;
return new BuilderWithTableName();
return new BuilderWithTableName(mySink, theTableName);
}
public void addTask(BaseTask theTask) {
theTask.validate();
myTasks.put(myVersion, theTask);
public void addTask(BaseTask<?> theTask) {
mySink.addTask(theTask);
}
public BuilderAddTable addTable(String theTableName) {
myTableName = theTableName;
return new BuilderAddTable();
public BuilderAddTableRawSql addTableRawSql(String theTableName) {
return new BuilderAddTableRawSql(theTableName);
}
public Builder startSectionWithMessage(String theMessage) {
@@ -94,10 +98,23 @@ public class BaseMigrationTasks<T extends Enum> {
return this;
}
public class BuilderWithTableName {
private String myIndexName;
private String myColumnName;
private String myForeignKeyName;
public BuilderAddTableByColumns addTableByColumns(String theTableName, String thePkColumnName) {
return new BuilderAddTableByColumns(mySink, theTableName, thePkColumnName);
}
public void addIdGenerator(String theGeneratorName) {
AddIdGeneratorTask task = new AddIdGeneratorTask(theGeneratorName);
addTask(task);
}
public static class BuilderWithTableName implements IAcceptsTasks {
private final String myTableName;
private final IAcceptsTasks mySink;
public BuilderWithTableName(IAcceptsTasks theSink, String theTableName) {
mySink = theSink;
myTableName = theTableName;
}
public String getTableName() {
return myTableName;
@@ -111,13 +128,11 @@ public class BaseMigrationTasks<T extends Enum> {
}
public BuilderAddIndexWithName addIndex(String theIndexName) {
myIndexName = theIndexName;
return new BuilderAddIndexWithName();
return new BuilderAddIndexWithName(theIndexName);
}
public BuilderAddColumnWithName addColumn(String theColumnName) {
myColumnName = theColumnName;
return new BuilderAddColumnWithName();
return new BuilderAddColumnWithName(theColumnName, this);
}
public void dropColumn(String theColumnName) {
@@ -128,30 +143,38 @@ public class BaseMigrationTasks<T extends Enum> {
addTask(task);
}
public void addTask(BaseTableTask<?> theTask) {
theTask.setTableName(myTableName);
Builder.this.addTask(theTask);
@Override
public void addTask(BaseTask<?> theTask) {
((BaseTableTask<?>)theTask).setTableName(myTableName);
mySink.addTask(theTask);
}
public BuilderModifyColumnWithName modifyColumn(String theColumnName) {
myColumnName = theColumnName;
return new BuilderModifyColumnWithName();
return new BuilderModifyColumnWithName(theColumnName);
}
public BuilderAddForeignKey addForeignKey(String theForeignKeyName) {
myForeignKeyName = theForeignKeyName;
return new BuilderAddForeignKey();
return new BuilderAddForeignKey(theForeignKeyName);
}
public class BuilderAddIndexWithName {
private boolean myUnique;
private final String myIndexName;
public BuilderAddIndexWithName(String theIndexName) {
myIndexName = theIndexName;
}
public BuilderAddIndexUnique unique(boolean theUnique) {
myUnique = theUnique;
return new BuilderAddIndexUnique();
return new BuilderAddIndexUnique(theUnique);
}
public class BuilderAddIndexUnique {
private final boolean myUnique;
public BuilderAddIndexUnique(boolean theUnique) {
myUnique = theUnique;
}
public void withColumns(String... theColumnNames) {
AddIndexTask task = new AddIndexTask();
task.setTableName(myTableName);
@@ -163,15 +186,30 @@ public class BaseMigrationTasks<T extends Enum> {
}
}
public class BuilderAddColumnWithName {
private boolean myNullable;
public static class BuilderAddColumnWithName {
private final String myColumnName;
private final IAcceptsTasks myTaskSink;
public BuilderAddColumnWithName(String theColumnName, IAcceptsTasks theTaskSink) {
myColumnName = theColumnName;
myTaskSink = theTaskSink;
}
public BuilderAddColumnWithNameNullable nullable() {
myNullable = true;
return new BuilderAddColumnWithNameNullable();
return new BuilderAddColumnWithNameNullable(true);
}
public BuilderAddColumnWithNameNullable nonNullable() {
return new BuilderAddColumnWithNameNullable(false);
}
public class BuilderAddColumnWithNameNullable {
private final boolean myNullable;
public BuilderAddColumnWithNameNullable(boolean theNullable) {
myNullable = theNullable;
}
public void type(AddColumnTask.ColumnTypeEnum theColumnType) {
type(theColumnType, null);
}
@@ -184,27 +222,39 @@ public class BaseMigrationTasks<T extends Enum> {
if (theLength != null) {
task.setColumnLength(theLength);
}
addTask(task);
myTaskSink.addTask(task);
}
}
}
public class BuilderModifyColumnWithName {
private boolean myNullable;
private final String myColumnName;
public BuilderModifyColumnWithName(String theColumnName) {
myColumnName = theColumnName;
}
public String getColumnName() {
return myColumnName;
}
public BuilderModifyColumnWithNameAndNullable nullable() {
myNullable = true;
return new BuilderModifyColumnWithNameAndNullable();
return new BuilderModifyColumnWithNameAndNullable(true);
}
public BuilderModifyColumnWithNameAndNullable nonNullable() {
myNullable = false;
return new BuilderModifyColumnWithNameAndNullable();
return new BuilderModifyColumnWithNameAndNullable(false);
}
public class BuilderModifyColumnWithNameAndNullable {
private final boolean myNullable;
public BuilderModifyColumnWithNameAndNullable(boolean theNullable) {
myNullable = theNullable;
}
public void withType(BaseTableColumnTypeTask.ColumnTypeEnum theColumnType) {
withType(theColumnType, null);
}
@@ -235,18 +285,27 @@ public class BaseMigrationTasks<T extends Enum> {
}
}
public class BuilderAddForeignKey extends BuilderModifyColumnWithName {
public BuilderAddForeignKeyToColumn toColumn(String theColumnName) {
myColumnName = theColumnName;
return new BuilderAddForeignKeyToColumn();
public class BuilderAddForeignKey {
private final String myForeignKeyName;
public BuilderAddForeignKey(String theForeignKeyName) {
myForeignKeyName = theForeignKeyName;
}
public class BuilderAddForeignKeyToColumn {
public BuilderAddForeignKeyToColumn toColumn(String theColumnName) {
return new BuilderAddForeignKeyToColumn(theColumnName);
}
public class BuilderAddForeignKeyToColumn extends BuilderModifyColumnWithName {
public BuilderAddForeignKeyToColumn(String theColumnName) {
super(theColumnName);
}
public void references(String theForeignTable, String theForeignColumn) {
AddForeignKeyTask task = new AddForeignKeyTask();
task.setTableName(myTableName);
task.setConstraintName(myForeignKeyName);
task.setColumnName(myColumnName);
task.setColumnName(getColumnName());
task.setForeignTableName(theForeignTable);
task.setForeignColumnName(theForeignColumn);
addTask(task);
@@ -255,23 +314,43 @@ public class BaseMigrationTasks<T extends Enum> {
}
}
public class BuilderAddTable {
public class BuilderAddTableRawSql {
private final AddTableTask myTask;
private final AddTableRawSqlTask myTask;
protected BuilderAddTable() {
myTask = new AddTableTask();
myTask.setTableName(myTableName);
protected BuilderAddTableRawSql(String theTableName) {
myTask = new AddTableRawSqlTask();
myTask.setTableName(theTableName);
addTask(myTask);
}
public BuilderAddTable addSql(DriverTypeEnum theDriverTypeEnum, @Language("SQL") String theSql) {
public BuilderAddTableRawSql addSql(DriverTypeEnum theDriverTypeEnum, @Language("SQL") String theSql) {
myTask.addSql(theDriverTypeEnum, theSql);
return this;
}
}
public class BuilderAddTableByColumns implements IAcceptsTasks {
private final AddTableByColumnTask myTask;
public BuilderAddTableByColumns(IAcceptsTasks theSink, String theTableName, String thePkColumnName) {
myTask = new AddTableByColumnTask();
myTask.setTableName(theTableName);
myTask.setPkColumn(thePkColumnName);
theSink.addTask(myTask);
}
public BuilderWithTableName.BuilderAddColumnWithName addColumn(String theColumnName) {
return new BuilderWithTableName.BuilderAddColumnWithName(theColumnName, this);
}
@Override
public void addTask(BaseTask<?> theTask) {
myTask.addAddColumnTask((AddColumnTask) theTask);
}
}
}
}
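
Taken together, the refactored builder routes every task through the IAcceptsTasks sink instead of mutable fields on inner classes. A usage sketch modeled on the call sites visible in this commit (the version and table/column names are illustrative):

import ca.uhn.fhir.jpa.migrate.taskdef.BaseTableColumnTypeTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;

public class ExampleMigrationTasks extends BaseMigrationTasks<VersionEnum> {
	public ExampleMigrationTasks() {
		Builder v = forVersion(VersionEnum.V3_6_0);

		// Each call below hands a validated task to this version's task sink
		v.addIdGenerator("SEQ_EXAMPLE");

		Builder.BuilderAddTableByColumns table = v.addTableByColumns("EXAMPLE_TABLE", "PID");
		table.addColumn("PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
		table.addColumn("NAME").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
	}
}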


@@ -0,0 +1,48 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;
import org.junit.Test;
import java.sql.SQLException;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.assertThat;
public class AddIdGeneratorTaskTest extends BaseTest {
@Test
public void testAddIdGenerator() throws SQLException {
assertThat(JdbcUtils.getSequenceNames(getConnectionProperties()), empty());
MyMigrationTasks migrator = new MyMigrationTasks();
getMigrator().addTasks(migrator.getTasks(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
getMigrator().migrate();
assertThat(JdbcUtils.getSequenceNames(getConnectionProperties()), containsInAnyOrder("SEQ_FOO"));
// Second time, should produce no action
migrator = new MyMigrationTasks();
getMigrator().addTasks(migrator.getTasks(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
getMigrator().migrate();
assertThat(JdbcUtils.getSequenceNames(getConnectionProperties()), containsInAnyOrder("SEQ_FOO"));
}
private static class MyMigrationTasks extends BaseMigrationTasks<VersionEnum> {
public MyMigrationTasks() {
Builder v = forVersion(VersionEnum.V3_5_0);
v.addIdGenerator("SEQ_FOO");
}
}
}


@@ -0,0 +1,39 @@
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.util.VersionEnum;
import org.junit.Test;
import java.sql.SQLException;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertThat;
public class AddTableByColumnTaskTest extends BaseTest {
@Test
public void testAddTable() throws SQLException {
MyMigrationTasks migrator = new MyMigrationTasks();
getMigrator().addTasks(migrator.getTasks(VersionEnum.V3_3_0, VersionEnum.V3_6_0));
getMigrator().migrate();
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), containsInAnyOrder("FOO_TABLE"));
}
private static class MyMigrationTasks extends BaseMigrationTasks<VersionEnum> {
public MyMigrationTasks() {
Builder v = forVersion(VersionEnum.V3_5_0);
Builder.BuilderAddTableByColumns fooTable = v.addTableByColumns("FOO_TABLE", "PID");
fooTable.addColumn("PID").nonNullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.LONG);
fooTable.addColumn("HELLO").nullable().type(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 200);
}
}
}


@@ -14,7 +14,7 @@ public class AddTableTest extends BaseTest {
@Test
public void testTableDoesntAlreadyExist() throws SQLException {
AddTableTask task = new AddTableTask();
AddTableRawSqlTask task = new AddTableRawSqlTask();
task.setTableName("SOMETABLE");
task.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
getMigrator().addTask(task);
@@ -29,7 +29,7 @@ public class AddTableTest extends BaseTest {
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
assertThat(JdbcUtils.getTableNames(getConnectionProperties()), containsInAnyOrder("SOMETABLE"));
AddTableTask task = new AddTableTask();
AddTableRawSqlTask task = new AddTableRawSqlTask();
task.setTableName("SOMETABLE");
task.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))");
getMigrator().addTask(task);


@@ -112,6 +112,16 @@
instead of
<![CDATA[<code>QuestionnaireResponse?subject.name=smith</code>]]>.
</action>
<action type="add">
The LOINC uploader has been updated to support the LOINC 2.65 release file format.
</action>
<action type="add">
The resource reindexer can now detect when a resource's current version no longer
exists in the database (e.g. because it was manually expunged), and can
automatically adjust the most recent version to account for this.
</action>
</release>
<release version="3.6.0" date="2018-11-12" description="Food">
<action type="add">