Handle conditional creates across partitions with duplicate search URLs by updating ResourceSearchUrlEntity (#5983)

* First commit.

* SQLs in test class work for Postgres.

* Refine DropPrimaryKeyTask.  Add migration code to do all operations except drop and add primary key.  Comment out stub test code to do all non drop and add primary key operations.  Add verification code to test to verify schema and record counts.

* Oracle drop primary key working.

* Oracle drop primary key working in test code.  Proper drop primary key and add primary key tasks working in H2.

* Migration tests work on all 4 databases.

* Spotless.

* Fix compile error.

* Add hapi error codes for new Exceptions.

* Add another hapi error code.

* Cleanup TODOs.  Fix error codes.  Spotless.

* Add new partition conditional create test.  Fix bug from merging tasks class.  Add new unit test.  Add changelog without hapi-fhir issue.

* Add test for conditional create with partitions.

* Fix TODOs.  Revert unneeded changes.  Rename changelog.  Refinements.  Javadoc.

* Tweak changelog.

* Rename config key internally.  Spotless.

* Fix a newly introduced bug.  Cleanup.  Tweak changelog.

* Small fix.  Spotless.

* Fix compile error.

* Spotless.

* Address code review comments.

* Address more code review comments.

* Fix test.

* Fix test.
Luke deGruchy 2024-06-25 12:11:47 -04:00 committed by GitHub
parent 11d61a5f72
commit d4e3698f37
17 changed files with 770 additions and 33 deletions


@@ -0,0 +1,7 @@
---
type: fix
issue: 6033
jira: SMILE-8429
title: "Previously, attempting to store resources with common identifies but different partitions would.
This has been fixed by adding a new configuration key defaulting to false to allow storing resources with duplicate identifiers across partitions.
This new feature can be activated by calling PartitionSettings.setConditionalCreateDuplicateIdentifiersEnabled()"
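A minimal sketch of opting in (the PartitionSettings bean and both setters appear elsewhere in this commit; the standalone instantiation below is only for illustration):

PartitionSettings partitionSettings = new PartitionSettings();
// Partitioning must be enabled for per-partition search URLs to be meaningful.
partitionSettings.setPartitioningEnabled(true);
// Defaults to false: search URLs then share the -1 null-equivalent partition ID,
// so duplicate identifiers still conflict across partitions.
partitionSettings.setConditionalCreateDuplicateIdentifiersEnabled(true);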


@@ -80,6 +80,7 @@ import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices;
import ca.uhn.fhir.jpa.interceptor.OverridePathBasedReferentialIntegrityForDeletesInterceptor;
import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingRuleBuilder;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.packages.IHapiPackageCacheManager;
@@ -857,12 +858,14 @@ public class JpaConfig {
PersistenceContextProvider thePersistenceContextProvider,
IResourceSearchUrlDao theResourceSearchUrlDao,
MatchUrlService theMatchUrlService,
FhirContext theFhirContext) {
FhirContext theFhirContext,
PartitionSettings thePartitionSettings) {
return new ResourceSearchUrlSvc(
thePersistenceContextProvider.getEntityManager(),
theResourceSearchUrlDao,
theMatchUrlService,
theFhirContext);
theFhirContext,
thePartitionSettings);
}
@Bean


@@ -128,7 +128,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
protected void init740() {
// Start of migrations from 7.2 to 7.4
Builder version = forVersion(VersionEnum.V7_4_0);
final Builder version = forVersion(VersionEnum.V7_4_0);
{
version.onTable("HFJ_RES_SEARCH_URL")
@@ -348,6 +348,30 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
.nullable()
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
{
// Please see https://github.com/hapifhir/hapi-fhir/issues/6033 for why we're doing this
version.onTable("HFJ_RES_SEARCH_URL")
.addColumn("20240618.2", "PARTITION_ID", -1)
.nullable()
.type(ColumnTypeEnum.INT);
version.onTable("HFJ_RES_SEARCH_URL")
.addColumn("20240618.3", "PARTITION_DATE")
.nullable()
.type(ColumnTypeEnum.DATE_ONLY);
version.executeRawSql("20240618.4", "UPDATE HFJ_RES_SEARCH_URL SET PARTITION_ID = -1");
version.onTable("HFJ_RES_SEARCH_URL")
.modifyColumn("20240618.5", "PARTITION_ID")
.nonNullable()
.withType(ColumnTypeEnum.INT);
version.onTable("HFJ_RES_SEARCH_URL").dropPrimaryKey("20240618.6");
version.onTable("HFJ_RES_SEARCH_URL").addPrimaryKey("20240618.7", "RES_SEARCH_URL", "PARTITION_ID");
}
}
{
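For reference, a sketch of roughly what migration steps 20240618.2 through .7 amount to on Postgres; the exact DDL is generated per database by the task classes in this commit, and the name of the dropped constraint is introspected at runtime, so the constraint name below is hypothetical:

// Approximate Postgres equivalents of the builder steps above (a sketch):
List<String> approximatePostgresSql = List.of(
"ALTER TABLE HFJ_RES_SEARCH_URL ADD COLUMN PARTITION_ID INT DEFAULT -1",
"ALTER TABLE HFJ_RES_SEARCH_URL ADD COLUMN PARTITION_DATE DATE",
"UPDATE HFJ_RES_SEARCH_URL SET PARTITION_ID = -1",
"ALTER TABLE HFJ_RES_SEARCH_URL ALTER COLUMN PARTITION_ID SET NOT NULL",
// The real constraint name is looked up from information_schema; this one is assumed.
"ALTER TABLE HFJ_RES_SEARCH_URL DROP CONSTRAINT hfj_res_search_url_pkey",
"ALTER TABLE HFJ_RES_SEARCH_URL ADD PRIMARY KEY (RES_SEARCH_URL, PARTITION_ID)");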


@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.search;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
@@ -51,16 +52,19 @@ public class ResourceSearchUrlSvc {
private final MatchUrlService myMatchUrlService;
private final FhirContext myFhirContext;
private final PartitionSettings myPartitionSettings;
public ResourceSearchUrlSvc(
EntityManager theEntityManager,
IResourceSearchUrlDao theResourceSearchUrlDao,
MatchUrlService theMatchUrlService,
FhirContext theFhirContext) {
FhirContext theFhirContext,
PartitionSettings thePartitionSettings) {
myEntityManager = theEntityManager;
myResourceSearchUrlDao = theResourceSearchUrlDao;
myMatchUrlService = theMatchUrlService;
myFhirContext = theFhirContext;
myPartitionSettings = thePartitionSettings;
}
/**
@@ -87,8 +91,10 @@
String theResourceName, String theMatchUrl, ResourceTable theResourceTable) {
String canonicalizedUrlForStorage = createCanonicalizedUrlForStorage(theResourceName, theMatchUrl);
ResourceSearchUrlEntity searchUrlEntity =
ResourceSearchUrlEntity.from(canonicalizedUrlForStorage, theResourceTable);
ResourceSearchUrlEntity searchUrlEntity = ResourceSearchUrlEntity.from(
canonicalizedUrlForStorage,
theResourceTable,
myPartitionSettings.isConditionalCreateDuplicateIdentifiersEnabled());
// calling dao.save performs a merge operation which implies a trip to
// the database to see if the resource exists. Since we don't need the check, we avoid the trip by calling
// em.persist.


@@ -32,6 +32,7 @@ public class PartitionSettings {
private boolean myUnnamedPartitionMode;
private Integer myDefaultPartitionId;
private boolean myAlwaysOpenNewTransactionForDifferentPartition;
private boolean myConditionalCreateDuplicateIdentifiersEnabled = false;
/**
* Should we always open a new database transaction if the partition context changes
@@ -171,6 +172,15 @@
PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED);
}
public boolean isConditionalCreateDuplicateIdentifiersEnabled() {
return myConditionalCreateDuplicateIdentifiersEnabled;
}
public void setConditionalCreateDuplicateIdentifiersEnabled(
boolean theConditionalCreateDuplicateIdentifiersEnabled) {
myConditionalCreateDuplicateIdentifiersEnabled = theConditionalCreateDuplicateIdentifiersEnabled;
}
public enum CrossPartitionReferenceMode {
/**


@@ -20,10 +20,10 @@
package ca.uhn.fhir.jpa.model.entity;
import jakarta.persistence.Column;
import jakarta.persistence.EmbeddedId;
import jakarta.persistence.Entity;
import jakarta.persistence.FetchType;
import jakarta.persistence.ForeignKey;
import jakarta.persistence.Id;
import jakarta.persistence.Index;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
@@ -31,7 +31,9 @@ import jakarta.persistence.Table;
import jakarta.persistence.Temporal;
import jakarta.persistence.TemporalType;
import java.time.LocalDate;
import java.util.Date;
import java.util.Optional;
/**
* This entity is used to enforce uniqueness on a given search URL being
@@ -52,12 +54,12 @@ import java.util.Date;
public class ResourceSearchUrlEntity {
public static final String RES_SEARCH_URL_COLUMN_NAME = "RES_SEARCH_URL";
public static final String PARTITION_ID = "PARTITION_ID";
public static final int RES_SEARCH_URL_LENGTH = 768;
@Id
@Column(name = RES_SEARCH_URL_COLUMN_NAME, length = RES_SEARCH_URL_LENGTH, nullable = false)
private String mySearchUrl;
@EmbeddedId
private ResourceSearchUrlEntityPK myPk;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(
@@ -70,17 +72,35 @@ public class ResourceSearchUrlEntity {
@Column(name = "RES_ID", updatable = false, nullable = false, insertable = false)
private Long myResourcePid;
@Column(name = "PARTITION_DATE", nullable = true, insertable = true, updatable = false)
private LocalDate myPartitionDate;
@Column(name = "CREATED_TIME", nullable = false)
@Temporal(TemporalType.TIMESTAMP)
private Date myCreatedTime;
public static ResourceSearchUrlEntity from(String theUrl, ResourceTable theResourceTable) {
public static ResourceSearchUrlEntity from(
String theUrl, ResourceTable theResourceTable, boolean theSearchUrlDuplicateAcrossPartitionsEnabled) {
return new ResourceSearchUrlEntity()
.setPk(ResourceSearchUrlEntityPK.from(
theUrl, theResourceTable, theSearchUrlDuplicateAcrossPartitionsEnabled))
.setPartitionDate(Optional.ofNullable(theResourceTable.getPartitionId())
.map(PartitionablePartitionId::getPartitionDate)
.orElse(null))
.setResourceTable(theResourceTable)
.setSearchUrl(theUrl)
.setCreatedTime(new Date());
}
public ResourceSearchUrlEntityPK getPk() {
return myPk;
}
public ResourceSearchUrlEntity setPk(ResourceSearchUrlEntityPK thePk) {
myPk = thePk;
return this;
}
public Long getResourcePid() {
if (myResourcePid != null) {
return myResourcePid;
@@ -112,11 +132,19 @@
}
public String getSearchUrl() {
return mySearchUrl;
return myPk.getSearchUrl();
}
public ResourceSearchUrlEntity setSearchUrl(String theSearchUrl) {
mySearchUrl = theSearchUrl;
public Integer getPartitionId() {
return myPk.getPartitionId();
}
public LocalDate getPartitionDate() {
return myPartitionDate;
}
public ResourceSearchUrlEntity setPartitionDate(LocalDate thePartitionDate) {
myPartitionDate = thePartitionDate;
return this;
}
}


@@ -0,0 +1,118 @@
/*-
* #%L
* HAPI FHIR JPA Model
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.model.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Embeddable;
import java.io.Serializable;
import java.util.Objects;
import java.util.Optional;
import java.util.StringJoiner;
/**
* Multi-column primary Key for {@link ResourceSearchUrlEntity}
*/
@Embeddable
public class ResourceSearchUrlEntityPK implements Serializable {
public static final String RES_SEARCH_URL_COLUMN_NAME = "RES_SEARCH_URL";
public static final String PARTITION_ID_COLUMN_NAME = "PARTITION_ID";
public static final int RES_SEARCH_URL_LENGTH = 768;
private static final long serialVersionUID = 1L;
private static final int PARTITION_ID_NULL_EQUIVALENT = -1;
@Column(name = RES_SEARCH_URL_COLUMN_NAME, length = RES_SEARCH_URL_LENGTH, nullable = false)
// Weird field name is to ensure that this is the first key in the index
private String my_A_SearchUrl;
@Column(name = PARTITION_ID_COLUMN_NAME, nullable = false, insertable = true, updatable = false)
// Weird field name is to ensure that this is the second key in the index
private Integer my_B_PartitionId;
public ResourceSearchUrlEntityPK() {}
public static ResourceSearchUrlEntityPK from(
String theSearchUrl, ResourceTable theResourceTable, boolean theSearchUrlDuplicateAcrossPartitionsEnabled) {
return new ResourceSearchUrlEntityPK(
theSearchUrl,
computePartitionIdOrNullEquivalent(theResourceTable, theSearchUrlDuplicateAcrossPartitionsEnabled));
}
public ResourceSearchUrlEntityPK(String theSearchUrl, int thePartitionId) {
my_A_SearchUrl = theSearchUrl;
my_B_PartitionId = thePartitionId;
}
public String getSearchUrl() {
return my_A_SearchUrl;
}
public void setSearchUrl(String theMy_A_SearchUrl) {
my_A_SearchUrl = theMy_A_SearchUrl;
}
public Integer getPartitionId() {
return my_B_PartitionId;
}
public void setPartitionId(Integer theMy_B_PartitionId) {
my_B_PartitionId = theMy_B_PartitionId;
}
@Override
public boolean equals(Object theO) {
if (this == theO) {
return true;
}
if (theO == null || getClass() != theO.getClass()) {
return false;
}
ResourceSearchUrlEntityPK that = (ResourceSearchUrlEntityPK) theO;
return Objects.equals(my_A_SearchUrl, that.my_A_SearchUrl)
&& Objects.equals(my_B_PartitionId, that.my_B_PartitionId);
}
@Override
public int hashCode() {
return Objects.hash(my_A_SearchUrl, my_B_PartitionId);
}
@Override
public String toString() {
return new StringJoiner(", ", ResourceSearchUrlEntityPK.class.getSimpleName() + "[", "]")
.add("my_A_SearchUrl='" + my_A_SearchUrl + "'")
.add("my_B_PartitionId=" + my_B_PartitionId)
.toString();
}
private static int computePartitionIdOrNullEquivalent(
ResourceTable theResourceTable, boolean theSearchUrlDuplicateAcrossPartitionsEnabled) {
if (!theSearchUrlDuplicateAcrossPartitionsEnabled) {
return PARTITION_ID_NULL_EQUIVALENT;
}
return Optional.ofNullable(theResourceTable.getPartitionId())
.map(PartitionablePartitionId::getPartitionId)
.orElse(PARTITION_ID_NULL_EQUIVALENT);
}
}
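A short usage sketch of the key semantics (URL and partition IDs invented for illustration): when the feature is disabled, from() forces the partition ID to the -1 null-equivalent, so the same URL on two partitions yields equal keys and the second insert conflicts; when enabled, the real partition ID participates in equality:

String url = "Task?identifier=http://tempuri.org|1";
// Feature disabled: every partition collapses onto the -1 null-equivalent key.
ResourceSearchUrlEntityPK collides = new ResourceSearchUrlEntityPK(url, -1);
// Feature enabled: the partition ID differentiates otherwise identical URLs.
ResourceSearchUrlEntityPK onPartition1 = new ResourceSearchUrlEntityPK(url, 1);
ResourceSearchUrlEntityPK onPartition2 = new ResourceSearchUrlEntityPK(url, 2);
// equals()/hashCode() compare both columns, so these two keys are distinct.
assert !onPartition1.equals(onPartition2);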


@@ -41,13 +41,14 @@ import java.util.stream.Collectors;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.fail;
public class FhirResourceDaoR4ConcurrentCreateTest extends BaseJpaR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoR4ConcurrentCreateTest.class);
private static final boolean IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE = false;
ThreadGaterPointcutLatch myThreadGaterPointcutLatchInterceptor;
UserRequestRetryVersionConflictsInterceptor myUserRequestRetryVersionConflictsInterceptor;
ResourceConcurrentSubmitterSvc myResourceConcurrentSubmitterSvc;
@@ -132,12 +133,12 @@
final ResourceTable resTable4 = myResourceTableDao.save(createResTable());
Date tooOldBy10Minutes = cutOffTimeMinus(tenMinutes);
ResourceSearchUrlEntity tooOld1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.444", resTable1).setCreatedTime(tooOldBy10Minutes);
ResourceSearchUrlEntity tooOld2 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.445", resTable2).setCreatedTime(tooOldBy10Minutes);
ResourceSearchUrlEntity tooOld1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.444", resTable1, IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE).setCreatedTime(tooOldBy10Minutes);
ResourceSearchUrlEntity tooOld2 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.445", resTable2, IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE).setCreatedTime(tooOldBy10Minutes);
Date tooNewBy10Minutes = cutOffTimePlus(tenMinutes);
ResourceSearchUrlEntity tooNew1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.446", resTable3).setCreatedTime(tooNewBy10Minutes);
ResourceSearchUrlEntity tooNew2 =ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.447", resTable4).setCreatedTime(tooNewBy10Minutes);
ResourceSearchUrlEntity tooNew1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.446", resTable3, IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE).setCreatedTime(tooNewBy10Minutes);
ResourceSearchUrlEntity tooNew2 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.447", resTable4, IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE).setCreatedTime(tooNewBy10Minutes);
myResourceSearchUrlDao.saveAll(asList(tooOld1, tooOld2, tooNew1, tooNew2));
@@ -165,8 +166,8 @@
final ResourceTable resTable1 = myResourceTableDao.save(createResTable());
final ResourceTable resTable2 = myResourceTableDao.save(createResTable());
ResourceSearchUrlEntity entry1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.444", resTable1);
ResourceSearchUrlEntity entry2 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.445", resTable2);
ResourceSearchUrlEntity entry1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.444", resTable1, IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE);
ResourceSearchUrlEntity entry2 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.445", resTable2, IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE);
myResourceSearchUrlDao.saveAll(asList(entry1, entry2));
// when


@@ -1,9 +1,11 @@
package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
@@ -65,6 +67,7 @@ import org.springframework.transaction.support.TransactionTemplate;
import java.math.BigDecimal;
import java.time.Instant;
import java.time.LocalDate;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Arrays;
@@ -79,6 +82,7 @@ import java.util.concurrent.Future;
import java.util.stream.Collectors;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertInstanceOf;
@@ -1335,6 +1339,62 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
assertRemainingTasks();
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void conditionalCreateSameIdentifierCrossPartition(boolean theIsSearchUrlDuplicateAcrossPartitionsEnabled) {
myPartitionSettings.setPartitioningEnabled(true);
myPartitionSettings.setConditionalCreateDuplicateIdentifiersEnabled(theIsSearchUrlDuplicateAcrossPartitionsEnabled);
final PartitionEntity partitionEntity1 = new PartitionEntity();
partitionEntity1.setId(1);
partitionEntity1.setName("Partition-A");
myPartitionDao.save(partitionEntity1);
final PartitionEntity partitionEntity2 = new PartitionEntity();
partitionEntity2.setId(2);
partitionEntity2.setName("Partition-B");
myPartitionDao.save(partitionEntity2);
final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext);
final String matchUrl = "identifier=http://tempuri.org|1";
bundleBuilder.addTransactionCreateEntry(myTask1, "urn:uuid:59cda086-4763-4ef0-8e36-8c90058686ea")
.conditional(matchUrl);
final RequestPartitionId requestPartitionId1 = RequestPartitionId.fromPartitionId(1, LocalDate.now());
final RequestPartitionId requestPartitionId2 = RequestPartitionId.fromPartitionId(2, LocalDate.now());
final List<Bundle.BundleEntryComponent> responseEntries1 = sendBundleAndGetResponse(bundleBuilder.getBundle(), requestPartitionId1);
assertEquals(1, responseEntries1.size());
final Bundle.BundleEntryComponent bundleEntry1 = responseEntries1.get(0);
assertEquals("201 Created", bundleEntry1.getResponse().getStatus());
if (!theIsSearchUrlDuplicateAcrossPartitionsEnabled) {
final IBaseBundle bundle = bundleBuilder.getBundle();
assertThatThrownBy(() -> sendBundleAndGetResponse(bundle, requestPartitionId2)).isInstanceOf(ResourceVersionConflictException.class);
return;
}
final List<Bundle.BundleEntryComponent> responseEntries2 = sendBundleAndGetResponse(bundleBuilder.getBundle(), requestPartitionId2);
assertEquals(1, responseEntries2.size());
final Bundle.BundleEntryComponent bundleEntry2 = responseEntries2.get(0);
assertEquals("201 Created", bundleEntry2.getResponse().getStatus());
final List<ResourceSearchUrlEntity> allSearchUrls = myResourceSearchUrlDao.findAll();
assertThat(allSearchUrls).hasSize(2);
final String resolvedSearchUrl = "Task?identifier=http%3A%2F%2Ftempuri.org%7C1";
final ResourceSearchUrlEntity resourceSearchUrlEntity1 = allSearchUrls.get(0);
final ResourceSearchUrlEntity resourceSearchUrlEntity2 = allSearchUrls.get(1);
assertThat(resourceSearchUrlEntity1.getSearchUrl()).isEqualTo(resolvedSearchUrl);
assertThat(resourceSearchUrlEntity1.getPartitionId()).isEqualTo(partitionEntity1.getId());
assertThat(resourceSearchUrlEntity2.getSearchUrl()).isEqualTo(resolvedSearchUrl);
assertThat(resourceSearchUrlEntity2.getPartitionId()).isEqualTo(partitionEntity2.getId());
}
private void assertRemainingTasks(Task... theExpectedTasks) {
final List<ResourceSearchUrlEntity> searchUrlsPreDelete = myResourceSearchUrlDao.findAll();
@@ -1352,6 +1412,14 @@
}
}
private List<Bundle.BundleEntryComponent> sendBundleAndGetResponse(IBaseBundle theRequestBundle, RequestPartitionId thePartitionId) {
assertThat(theRequestBundle).isInstanceOf(Bundle.class);
final SystemRequestDetails requestDetails = new SystemRequestDetails();
requestDetails.setRequestPartitionId(thePartitionId);
return mySystemDao.transaction(requestDetails, (Bundle)theRequestBundle).getEntry();
}
private List<Bundle.BundleEntryComponent> sendBundleAndGetResponse(IBaseBundle theRequestBundle) {
assertTrue(theRequestBundle instanceof Bundle);


@@ -113,8 +113,7 @@ public abstract class JpaEmbeddedDatabase {
}
public void executeSqlAsBatch(List<String> theStatements) {
try {
Statement statement = myConnection.createStatement();
try (final Statement statement = myConnection.createStatement()) {
for (String sql : theStatements) {
if (!StringUtils.isBlank(sql)) {
statement.addBatch(sql);


@@ -10,6 +10,8 @@ import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
import ca.uhn.fhir.system.HapiSystemProperties;
import ca.uhn.fhir.test.utilities.docker.RequiresDocker;
import ca.uhn.fhir.util.VersionEnum;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import org.apache.commons.dbcp2.BasicDataSource;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
@@ -21,8 +23,19 @@ import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import static ca.uhn.fhir.jpa.embedded.HapiEmbeddedDatabasesExtension.FIRST_TESTED_VERSION;
@@ -38,6 +51,20 @@ public class HapiSchemaMigrationTest {
private static final Logger ourLog = LoggerFactory.getLogger(HapiSchemaMigrationTest.class);
public static final String TEST_SCHEMA_NAME = "test";
private static final String METADATA_COLUMN_NAME = "COLUMN_NAME";
private static final String METADATA_DATA_TYPE = "DATA_TYPE";
private static final String METADATA_IS_NULLABLE = "IS_NULLABLE";
private static final String METADATA_DEFAULT_VALUE = "COLUMN_DEF";
private static final String METADATA_IS_NULLABLE_NO = "NO";
private static final String METADATA_IS_NULLABLE_YES = "YES";
private static final String TABLE_HFJ_RES_SEARCH_URL = "HFJ_RES_SEARCH_URL";
private static final String COLUMN_RES_SEARCH_URL = "RES_SEARCH_URL";
private static final String COLUMN_PARTITION_ID = "PARTITION_ID";
private static final String COLUMN_PARTITION_DATE = "PARTITION_DATE";
private static final String NULL_PLACEHOLDER = "[NULL]";
static {
HapiSystemProperties.enableUnitTestMode();
}
@@ -92,10 +119,131 @@
}
verifyForcedIdMigration(dataSource);
verifyHfjResSearchUrlMigration(database, theDriverType);
}
private static void migrate(DriverTypeEnum theDriverType, DataSource dataSource, HapiMigrationStorageSvc hapiMigrationStorageSvc, VersionEnum to) throws SQLException {
MigrationTaskList migrationTasks = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getAllTasks(new VersionEnum[]{to});
/**
* We start with a single record in HFJ_RES_SEARCH_URL:
* <p/>
* <ul>
* <li>Primary key: ONLY RES_SEARCH_URL</li>
* <li>PK: RES_SEARCH_URL: https://example.com</li>
* <li>CREATED_TIME: 2023-06-29 10:14:39.69</li>
* <li>RES_ID: 1678</li>
* </ul>
* <p/>
* Once the migration is complete, we should have:
* <ul>
* <li>Primary key: RES_SEARCH_URL, PARTITION_ID</li>
* <li>PK: RES_SEARCH_URL: https://example.com</li>
* <li>PK: PARTITION_ID: -1</li>
* <li>CREATED_TIME: 2023-06-29 10:14:39.69</li>
* <li>RES_ID: 1678</li>
* <li>PARTITION_DATE: null</li>
* </ul>
*/
private void verifyHfjResSearchUrlMigration(JpaEmbeddedDatabase theDatabase, DriverTypeEnum theDriverType) throws SQLException {
final List<Map<String, Object>> allCount = theDatabase.query(String.format("SELECT count(*) FROM %s", TABLE_HFJ_RES_SEARCH_URL));
final List<Map<String, Object>> minusOnePartitionCount = theDatabase.query(String.format("SELECT count(*) FROM %s WHERE %s = -1", TABLE_HFJ_RES_SEARCH_URL, COLUMN_PARTITION_ID));
assertThat(minusOnePartitionCount).hasSize(1);
final Collection<Object> queryResultValues = minusOnePartitionCount.get(0).values();
assertThat(queryResultValues).hasSize(1);
final Object queryResultValue = queryResultValues.iterator().next();
assertThat(queryResultValue).isInstanceOf(Number.class);
if (queryResultValue instanceof Number queryResultNumber) {
assertThat(queryResultNumber.intValue()).isEqualTo(1);
}
final Object allCountValue = allCount.get(0).values().iterator().next();
if (allCountValue instanceof Number allCountNumber) {
assertThat(allCountNumber.intValue()).isEqualTo(1);
}
try (final Connection connection = theDatabase.getDataSource().getConnection()) {
final DatabaseMetaData tableMetaData = connection.getMetaData();
final List<Map<String,String>> actualColumnResults = new ArrayList<>();
try (final ResultSet columnsResultSet = tableMetaData.getColumns(null, null, TABLE_HFJ_RES_SEARCH_URL, null)) {
while (columnsResultSet.next()) {
final Map<String, String> columnMap = new HashMap<>();
actualColumnResults.add(columnMap);
extractAndAddToMap(columnsResultSet, columnMap, METADATA_COLUMN_NAME);
extractAndAddToMap(columnsResultSet, columnMap, METADATA_DATA_TYPE);
extractAndAddToMap(columnsResultSet, columnMap, METADATA_IS_NULLABLE);
extractAndAddToMap(columnsResultSet, columnMap, METADATA_DEFAULT_VALUE);
}
}
ourLog.info("6145: actualColumnResults: {}", actualColumnResults);
final List<Map<String,String>> actualPrimaryKeyResults = new ArrayList<>();
try (final ResultSet primaryKeyResultSet = tableMetaData.getPrimaryKeys(null, null, TABLE_HFJ_RES_SEARCH_URL)) {
while (primaryKeyResultSet.next()) {
final Map<String, String> primaryKeyMap = new HashMap<>();
actualPrimaryKeyResults.add(primaryKeyMap);
extractAndAddToMap(primaryKeyResultSet, primaryKeyMap, METADATA_COLUMN_NAME);
}
}
final List<Map<String, String>> expectedPrimaryKeyResults = List.of(
Map.of(METADATA_COLUMN_NAME, COLUMN_RES_SEARCH_URL),
Map.of(METADATA_COLUMN_NAME, COLUMN_PARTITION_ID)
);
assertThat(expectedPrimaryKeyResults).containsAll(actualPrimaryKeyResults);
final List<Map<String, String>> expectedColumnResults = List.of(
addExpectedColumnMetadata(COLUMN_RES_SEARCH_URL, Integer.toString(Types.VARCHAR), METADATA_IS_NULLABLE_NO, null),
addExpectedColumnMetadata("RES_ID", getExpectedSqlTypeForResId(theDriverType), METADATA_IS_NULLABLE_NO, null),
addExpectedColumnMetadata("CREATED_TIME", Integer.toString(Types.TIMESTAMP), METADATA_IS_NULLABLE_NO, null),
addExpectedColumnMetadata(COLUMN_PARTITION_ID, getExpectedSqlTypeForPartitionId(theDriverType), METADATA_IS_NULLABLE_NO, "-1"),
addExpectedColumnMetadata(COLUMN_PARTITION_DATE, getExpectedSqlTypeForPartitionDate(theDriverType), METADATA_IS_NULLABLE_YES, null)
);
assertThat(expectedColumnResults).containsAll(actualColumnResults);
}
}
@Nonnull
private Map<String, String> addExpectedColumnMetadata(String theColumnName, String theDataType, String theNullable, @Nullable String theDefaultValue) {
return Map.of(METADATA_COLUMN_NAME, theColumnName,
METADATA_DATA_TYPE, theDataType,
METADATA_IS_NULLABLE, theNullable,
METADATA_DEFAULT_VALUE, Optional.ofNullable(theDefaultValue)
.orElse(NULL_PLACEHOLDER));
}
private String getExpectedSqlTypeForResId(DriverTypeEnum theDriverType) {
return DriverTypeEnum.ORACLE_12C == theDriverType
? Integer.toString(Types.NUMERIC)
: Integer.toString(Types.BIGINT);
}
private String getExpectedSqlTypeForPartitionId(DriverTypeEnum theDriverType) {
return DriverTypeEnum.ORACLE_12C == theDriverType
? Integer.toString(Types.NUMERIC)
: Integer.toString(Types.INTEGER);
}
private String getExpectedSqlTypeForPartitionDate(DriverTypeEnum theDriverType) {
return DriverTypeEnum.ORACLE_12C == theDriverType
? Integer.toString(Types.TIMESTAMP)
: Integer.toString(Types.DATE);
}
private void extractAndAddToMap(ResultSet theResultSet, Map<String,String> theMap, String theColumn) throws SQLException {
theMap.put(theColumn, Optional.ofNullable(theResultSet.getString(theColumn))
.map(defaultValueNonNull -> defaultValueNonNull.equals("((-1))") ? "-1" : defaultValueNonNull) // MSSQL returns "((-1))" for default value
.map(String::toUpperCase)
.orElse(NULL_PLACEHOLDER));
}
private static void migrate(DriverTypeEnum theDriverType, DataSource dataSource, HapiMigrationStorageSvc hapiMigrationStorageSvc, VersionEnum to) {
MigrationTaskList migrationTasks = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getAllTasks(to);
SchemaMigrator schemaMigrator = new SchemaMigrator(TEST_SCHEMA_NAME, HAPI_FHIR_MIGRATION_TABLENAME, dataSource, new Properties(), migrationTasks, hapiMigrationStorageSvc);
schemaMigrator.setDriverType(theDriverType);
schemaMigrator.createMigrationTableIfRequired();
@@ -103,7 +251,7 @@
}
/**
* For bug https://github.com/hapifhir/hapi-fhir/issues/5546
* For bug <a href="https://github.com/hapifhir/hapi-fhir/issues/5546">https://github.com/hapifhir/hapi-fhir/issues/5546</a>
*/
private void verifyForcedIdMigration(DataSource theDataSource) throws SQLException {
JdbcTemplate jdbcTemplate = new JdbcTemplate(theDataSource);


@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.JdbcUtils;
import jakarta.annotation.Nonnull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -74,17 +75,20 @@ public class AddColumnTask extends BaseTableColumnTypeTask {
case MYSQL_5_7:
case MARIADB_10_1:
// Quote the column name as "SYSTEM" is a reserved word in MySQL
sql = "alter table " + getTableName() + " add column `" + getColumnName() + "` " + typeStatement;
sql = "alter table " + getTableName() + " add column `" + getColumnName() + "` " + typeStatement
+ buildDefaultClauseIfApplicable();
break;
case DERBY_EMBEDDED:
case POSTGRES_9_4:
case COCKROACHDB_21_1:
sql = "alter table " + getTableName() + " add column " + getColumnName() + " " + typeStatement;
sql = "alter table " + getTableName() + " add column " + getColumnName() + " " + typeStatement
+ buildDefaultClauseIfApplicable();
break;
case MSSQL_2012:
case ORACLE_12C:
case H2_EMBEDDED:
sql = "alter table " + getTableName() + " add " + getColumnName() + " " + typeStatement;
sql = "alter table " + getTableName() + " add " + getColumnName() + " " + typeStatement
+ buildDefaultClauseIfApplicable();
break;
default:
throw new IllegalStateException(Msg.code(60));
@@ -94,6 +98,11 @@
executeSql(getTableName(), sql);
}
@Nonnull
private String buildDefaultClauseIfApplicable() {
return buildString(getDefaultValue(), (obj -> " default " + obj), "");
}
public String getTypeStatement() {
String type = getSqlType();
String nullable = getSqlNotNull();


@@ -0,0 +1,78 @@
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.i18n.Msg;
import jakarta.annotation.Nonnull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;
/**
* Migration task that handles cross-database logic for adding a new primary key.
*/
public class AddPrimaryKeyTask extends BaseTableTask {
private static final Logger ourLog = LoggerFactory.getLogger(AddPrimaryKeyTask.class);
private final List<String> myPrimaryKeyColumnsInOrder;
public AddPrimaryKeyTask(
String theProductVersion, String theSchemaVersion, String theTableName, String... theColumnsInOrder) {
super(theProductVersion, theSchemaVersion);
setTableName(theTableName);
myPrimaryKeyColumnsInOrder = Arrays.asList(theColumnsInOrder);
}
@Nonnull
private String generateSql() {
switch (getDriverType()) {
case MYSQL_5_7:
case MARIADB_10_1:
case POSTGRES_9_4:
case DERBY_EMBEDDED:
case H2_EMBEDDED:
case ORACLE_12C:
case MSSQL_2012:
case COCKROACHDB_21_1:
return String.format(
"ALTER TABLE %s ADD PRIMARY KEY (%s)",
getTableName(), String.join(", ", myPrimaryKeyColumnsInOrder));
default:
throw new IllegalStateException(String.format(
"%s Unknown driver type. Cannot add primary key for task %s",
Msg.code(2531), getMigrationVersion()));
}
}
@Override
protected void doExecute() throws SQLException {
logInfo(
ourLog,
"Going to add a primary key on table {} for columns {}",
getTableName(),
myPrimaryKeyColumnsInOrder);
executeSql(getTableName(), generateSql());
}
}


@@ -19,18 +19,24 @@
*/
package ca.uhn.fhir.jpa.migrate.taskdef;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
public abstract class BaseTableColumnTypeTask extends BaseTableColumnTask {
private ColumnTypeEnum myColumnType;
private Boolean myNullable;
private Long myColumnLength;
@Nullable
private Object myDefaultValue;
/**
* Constructor
*/
@@ -99,6 +105,21 @@
return this;
}
@Nullable
public Object getDefaultValue() {
return myDefaultValue;
}
@Nonnull
String buildString(@Nullable Object theValue, Function<Object, String> theIfNotNull, String theDefaultResult) {
return Optional.ofNullable(theValue).map(theIfNotNull).orElse(theDefaultResult);
}
public BaseTableColumnTypeTask setDefaultValue(Object theDefaultValue) {
myDefaultValue = theDefaultValue;
return this;
}
@Override
protected void generateHashCode(HashCodeBuilder theBuilder) {
super.generateHashCode(theBuilder);


@@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.migrate.HapiMigrationException;
import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
import ca.uhn.fhir.system.HapiSystemProperties;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
@@ -36,6 +37,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.dao.DataAccessException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.ResultSetExtractor;
import org.springframework.transaction.support.TransactionTemplate;
import java.sql.SQLException;
@@ -174,6 +176,17 @@ public abstract class BaseTask {
captureExecutedStatement(theTableName, theSql, theArguments);
}
protected <T> T executeSqlWithResult(
@Language("SQL") String theSql, ResultSetExtractor<T> theResultSetExtractor, Object... theArguments) {
if (myTransactional) {
return getConnectionProperties()
.getTxTemplate()
.execute(t -> doExecuteSqlWithResult(theSql, theResultSetExtractor, theArguments));
}
return doExecuteSqlWithResult(theSql, theResultSetExtractor, theArguments);
}
protected void executeSqlListInTransaction(String theTableName, List<String> theSqlStatements) {
if (!isDryRun()) {
Integer changes;
@@ -207,7 +220,9 @@
return changesCount;
} catch (DataAccessException e) {
if (myFlags.contains(TaskFlagEnum.FAILURE_ALLOWED)) {
ourLog.info("Task {} did not exit successfully, but task is allowed to fail", getMigrationVersion());
ourLog.info(
"Task {} did not exit successfully on doExecuteSql(), but task is allowed to fail",
getMigrationVersion());
ourLog.debug("Error was: {}", e.getMessage(), e);
return 0;
} else {
@@ -217,6 +232,32 @@
}
}
@Nullable
private <T> T doExecuteSqlWithResult(
@Language("SQL") String theSql, ResultSetExtractor<T> theResultSetExtractor, Object... theArguments) {
final JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
// 0 means no timeout -- we use this for index rebuilds that may take time.
jdbcTemplate.setQueryTimeout(0);
try {
T result = jdbcTemplate.query(theSql, theResultSetExtractor, theArguments);
if (!HapiSystemProperties.isUnitTestModeEnabled()) {
logInfo(ourLog, "SQL \"{}\" returned result {}", theSql, result);
}
return result;
} catch (DataAccessException e) {
if (myFlags.contains(TaskFlagEnum.FAILURE_ALLOWED)) {
ourLog.info(
"Task {} did not exit successfully on doExecuteSqlWithResult(), but task is allowed to fail",
getMigrationVersion());
ourLog.debug("Error was: {}", e.getMessage(), e);
return null;
} else {
throw new HapiMigrationException(
Msg.code(2532) + "Failed during task " + getMigrationVersion() + ": " + e, e);
}
}
}
protected void captureExecutedStatement(
String theTableName, @Language("SQL") String theSql, Object... theArguments) {
myExecutedStatements.add(new ExecutedStatement(mySchemaVersion, theTableName, theSql, theArguments));


@@ -0,0 +1,151 @@
/*-
* #%L
* HAPI FHIR Server - SQL Migration
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.ResultSetExtractor;
import java.sql.SQLException;
/**
* Migration task that handles cross-database logic for dropping a primary key.
* <p>
* The process involves 2 steps for most databases:
* <ol>
* <li>Running SQL to introspect the metadata tables to determine the name of the primary key.</li>
* <li>Running an ALTER TABLE to drop the constraint found above by name.</li>
* </ol>
*/
public class DropPrimaryKeyTask extends BaseTableTask {
private static final Logger ourLog = LoggerFactory.getLogger(DropPrimaryKeyTask.class);
public DropPrimaryKeyTask(String theProductVersion, String theSchemaVersion, String theTableName) {
super(theProductVersion, theSchemaVersion);
setTableName(theTableName);
}
@Nonnull
private String generateSql() {
ourLog.debug("DropPrimaryKeyTask.generateSql()");
final ResultSetExtractor<String> resultSetExtractor = rs -> {
if (rs.next()) {
final String singleResult = rs.getString(1);
if (rs.next()) {
throw new IllegalArgumentException(Msg.code(2533)
+ "Expecting only a single result for the table primary but got multiple for task: "
+ getMigrationVersion());
}
return singleResult;
}
return null;
};
@Nullable
@Language("SQL")
final String primaryKeyNameSql = generatePrimaryKeyNameSql();
@Nullable
final String primaryKeyName = primaryKeyNameSql != null
? executeSqlWithResult(primaryKeyNameSql, resultSetExtractor, getTableNameWithDatabaseExpectedCase())
: null;
ourLog.debug("primaryKeyName: {} for driver: {}", primaryKeyName, getDriverType());
return generateDropPrimaryKeySql(primaryKeyName);
}
private String getTableNameWithDatabaseExpectedCase() {
if (DriverTypeEnum.ORACLE_12C == getDriverType()) {
return getTableName().toUpperCase();
}
return getTableName().toLowerCase();
}
@Override
protected void doExecute() throws SQLException {
logInfo(ourLog, "Going to DROP the PRIMARY KEY on table {}", getTableName());
executeSql(getTableName(), generateSql());
}
private String generateDropPrimaryKeySql(@Nullable String thePrimaryKeyName) {
switch (getDriverType()) {
case MARIADB_10_1:
case DERBY_EMBEDDED:
case H2_EMBEDDED:
@Language("SQL")
final String sqlH2 = "ALTER TABLE %s DROP PRIMARY KEY";
return String.format(sqlH2, getTableName());
case POSTGRES_9_4:
case ORACLE_12C:
case MSSQL_2012:
case MYSQL_5_7:
assert thePrimaryKeyName != null;
@Language("SQL")
final String sql = "ALTER TABLE %s DROP CONSTRAINT %s";
return String.format(sql, getTableName(), thePrimaryKeyName);
default:
throw new IllegalStateException(String.format(
"%s Unknown driver type: %s. Cannot drop primary key: %s for task %s",
Msg.code(2529), getDriverType(), getTableName(), getMigrationVersion()));
}
}
@Language("SQL")
@Nullable
private String generatePrimaryKeyNameSql() {
switch (getDriverType()) {
case MYSQL_5_7:
case MARIADB_10_1:
case DERBY_EMBEDDED:
case COCKROACHDB_21_1:
case H2_EMBEDDED:
return null; // Irrelevant: We don't need to run the SQL for these databases.
case POSTGRES_9_4:
return "SELECT constraint_name " + "FROM information_schema.table_constraints "
+ "WHERE table_schema = 'public' "
+ "AND constraint_type = 'PRIMARY KEY' "
+ "AND table_name = ?";
case ORACLE_12C:
return "SELECT constraint_name " + "FROM user_constraints "
+ "WHERE constraint_type = 'P' "
+ "AND table_name = ?";
case MSSQL_2012:
return "SELECT tc.constraint_name " + "FROM information_schema.table_constraints tc "
+ "JOIN information_schema.constraint_column_usage ccu ON tc.constraint_name = ccu.constraint_name "
+ "WHERE tc.constraint_type = 'PRIMARY KEY' "
+ "AND tc.table_name = ?";
default:
throw new IllegalStateException(String.format(
"%s Unknown driver type: %s Cannot find primary key to drop for task %s",
Msg.code(2530), getDriverType(), getMigrationVersion()));
}
}
}


@@ -26,6 +26,7 @@ import ca.uhn.fhir.jpa.migrate.taskdef.AddColumnTask;
import ca.uhn.fhir.jpa.migrate.taskdef.AddForeignKeyTask;
import ca.uhn.fhir.jpa.migrate.taskdef.AddIdGeneratorTask;
import ca.uhn.fhir.jpa.migrate.taskdef.AddIndexTask;
import ca.uhn.fhir.jpa.migrate.taskdef.AddPrimaryKeyTask;
import ca.uhn.fhir.jpa.migrate.taskdef.AddTableByColumnTask;
import ca.uhn.fhir.jpa.migrate.taskdef.AddTableRawSqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTableTask;
@@ -35,6 +36,7 @@ import ca.uhn.fhir.jpa.migrate.taskdef.DropColumnTask;
import ca.uhn.fhir.jpa.migrate.taskdef.DropForeignKeyTask;
import ca.uhn.fhir.jpa.migrate.taskdef.DropIdGeneratorTask;
import ca.uhn.fhir.jpa.migrate.taskdef.DropIndexTask;
import ca.uhn.fhir.jpa.migrate.taskdef.DropPrimaryKeyTask;
import ca.uhn.fhir.jpa.migrate.taskdef.DropTableTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ExecuteRawSqlTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ExecuteTaskPrecondition;
@@ -47,6 +49,7 @@ import ca.uhn.fhir.jpa.migrate.taskdef.NopTask;
import ca.uhn.fhir.jpa.migrate.taskdef.RenameColumnTask;
import ca.uhn.fhir.jpa.migrate.taskdef.RenameIndexTask;
import ca.uhn.fhir.jpa.migrate.taskdef.RenameTableTask;
import jakarta.annotation.Nullable;
import org.apache.commons.lang3.Validate;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
@@ -260,7 +263,13 @@ public class Builder {
}
public BuilderWithTableName.BuilderAddColumnWithName addColumn(String theVersion, String theColumnName) {
return new BuilderWithTableName.BuilderAddColumnWithName(myRelease, theVersion, theColumnName, this);
return new BuilderWithTableName.BuilderAddColumnWithName(myRelease, theVersion, theColumnName, null, this);
}
public BuilderWithTableName.BuilderAddColumnWithName addColumn(
String theVersion, String theColumnName, Object theDefaultValue) {
return new BuilderWithTableName.BuilderAddColumnWithName(
myRelease, theVersion, theColumnName, theDefaultValue, this);
}
public BuilderCompleteTask dropColumn(String theVersion, String theColumnName) {
@@ -317,6 +326,10 @@
return Optional.ofNullable(myLastAddedTask);
}
public void addPrimaryKey(String theVersion, String... theColumnsInOrder) {
addTask(new AddPrimaryKeyTask(myRelease, theVersion, myTableName, theColumnsInOrder));
}
/**
* @param theFkName the name of the foreign key
* @param theParentTableName the name of the table that exports the foreign key
@@ -362,6 +375,11 @@
return new BuilderCompleteTask(task);
}
public void dropPrimaryKey(String theVersion) {
final DropPrimaryKeyTask task = new DropPrimaryKeyTask(myRelease, theVersion, myTableName);
addTask(task);
}
public class BuilderAddIndexWithName {
private final String myVersion;
private final String myIndexName;
@@ -547,16 +565,22 @@
private final String myRelease;
private final String myVersion;
private final String myColumnName;
@Nullable
private final Object myDefaultValue;
private final BaseMigrationTasks.IAcceptsTasks myTaskSink;
public BuilderAddColumnWithName(
String theRelease,
String theVersion,
String theColumnName,
@Nullable Object theDefaultValue,
BaseMigrationTasks.IAcceptsTasks theTaskSink) {
myRelease = theRelease;
myVersion = theVersion;
myColumnName = theColumnName;
myDefaultValue = theDefaultValue;
myTaskSink = theTaskSink;
}
@@ -593,6 +617,7 @@
if (theLength != null) {
task.setColumnLength(theLength);
}
task.setDefaultValue(myDefaultValue);
myTaskSink.addTask(task);
return new BuilderCompleteTask(task);
@@ -719,7 +744,7 @@
}
public BuilderAddColumnWithName addColumn(String theColumnName) {
return new BuilderAddColumnWithName(myRelease, myVersion, theColumnName, this);
return new BuilderAddColumnWithName(myRelease, myVersion, theColumnName, null, this);
}
@Override