Fix postgres / Remove use of LOB content column (#5555)

* Work on fixing postgres

* Test fix

* wip

* wip

* wip

* wip

* wip

* adding support for h2 embedded and renaming postgressIT

* Work on postgres

* Remove use of gzip content on postgres

* Cleanup

* Test fixes

* Spotless

* Restore fake db for DDL generator

---------

Co-authored-by: peartree <etienne.poirier@smilecdr.com>
James Agnew 2023-12-15 08:07:26 -05:00 committed by GitHub
parent d187399ce5
commit 7863f03c68
51 changed files with 572 additions and 272 deletions

View File

@@ -0,0 +1,10 @@
---
type: perf
issue: 5555
title: "Previously, resource body content went into one of 2 columns on the HFJ_RES_VER table:
RES_TEXT if the size was above a configurable threshold, or RES_TEXT_VC if it was below that
threshold. Performance testing has shown that the latter is always faster, and that on
Postgres the use of the latter is particularly problematic since it maps to the
largeobject table which isn't the recommended way of storing high frequency objects.
The configurable threshold is now ignored, and the latter column is always used. Any legacy
data in the former column will still be read however."
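
For illustration, a minimal sketch of the resulting read behavior, using the accessor names from ResourceHistoryTable shown later in this diff (readResourceText is a hypothetical helper, not the actual DAO method; decodeResource is the existing HAPI decoding utility):

    // New writes always populate RES_TEXT_VC; legacy rows may still hold
    // gzipped bytes in RES_TEXT, so reads fall back to that column.
    String readResourceText(ResourceHistoryTable theHistory) {
        String text = theHistory.getResourceTextVc();
        if (text != null) {
            return text; // written by HAPI FHIR 7.0.0 and later
        }
        byte[] legacyBytes = theHistory.getResource(); // legacy RES_TEXT contents
        if (legacyBytes == null) {
            return null;
        }
        // e.g. gunzip when the encoding is JSONC
        return decodeResource(legacyBytes, theHistory.getEncoding());
    }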

View File

@@ -19,7 +19,6 @@
*/
package ca.uhn.fhir.jpa.model.dialect;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.H2Dialect;
@@ -38,7 +37,7 @@ public class HapiFhirH2Dialect extends H2Dialect {
/**
* As of Hibernate 6, generated schemas include a column level check constraint that enforces valid values
* for columns that back an Enum type. For example, the column definition for {@link ResourceTable#getFhirVersion()}
* for columns that back an Enum type. For example, the column definition for <code>ResourceTable#getFhirVersion()</code>
* would look like:
* <pre>
* RES_VERSION varchar(7) check (RES_VERSION in ('DSTU2','DSTU2_HL7ORG','DSTU2_1','DSTU3','R4','R4B','R5')),

View File

@@ -48,8 +48,10 @@ public final class HapiEntityManagerFactoryUtil {
ConfigurableListableBeanFactory myConfigurableListableBeanFactory,
FhirContext theFhirContext,
JpaStorageSettings theStorageSettings) {
LocalContainerEntityManagerFactoryBean retVal =
new HapiFhirLocalContainerEntityManagerFactoryBean(myConfigurableListableBeanFactory);
configureEntityManagerFactory(retVal, theFhirContext, theStorageSettings);
return retVal;
}

View File

@@ -148,9 +148,7 @@ import org.springframework.transaction.support.TransactionSynchronization;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import org.springframework.transaction.support.TransactionTemplate;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
@@ -645,7 +643,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
theEntity.setResourceType(toResourceName(theResource));
}
byte[] resourceBinary;
String resourceText;
ResourceEncodingEnum encoding;
boolean changed = false;
@@ -662,7 +659,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
if (address != null) {
encoding = ResourceEncodingEnum.ESR;
resourceBinary = null;
resourceText = address.getProviderId() + ":" + address.getLocation();
changed = true;
@@ -680,19 +676,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
theEntity.setFhirVersion(myContext.getVersion().getVersion());
HashFunction sha256 = Hashing.sha256();
HashCode hashCode;
String encodedResource = encodeResource(theResource, encoding, excludeElements, myContext);
if (myStorageSettings.getInlineResourceTextBelowSize() > 0
&& encodedResource.length() < myStorageSettings.getInlineResourceTextBelowSize()) {
resourceText = encodedResource;
resourceBinary = null;
encoding = ResourceEncodingEnum.JSON;
hashCode = sha256.hashUnencodedChars(encodedResource);
} else {
resourceText = null;
resourceBinary = getResourceBinary(encoding, encodedResource);
hashCode = sha256.hashBytes(resourceBinary);
}
resourceText = encodeResource(theResource, encoding, excludeElements, myContext);
encoding = ResourceEncodingEnum.JSON;
HashCode hashCode = sha256.hashUnencodedChars(resourceText);
String hashSha256 = hashCode.toString();
if (!hashSha256.equals(theEntity.getHashSha256())) {
@@ -710,7 +696,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
} else {
encoding = null;
resourceBinary = null;
resourceText = null;
}
@@ -728,7 +713,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
changed = true;
}
resourceBinary = null;
resourceText = null;
encoding = ResourceEncodingEnum.DEL;
}
@@ -753,46 +737,19 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
if (currentHistoryVersion == null || !currentHistoryVersion.hasResource()) {
changed = true;
} else {
changed = !Arrays.equals(currentHistoryVersion.getResource(), resourceBinary);
changed = !StringUtils.equals(currentHistoryVersion.getResourceTextVc(), resourceText);
}
}
}
EncodedResource retVal = new EncodedResource();
retVal.setEncoding(encoding);
retVal.setResourceBinary(resourceBinary);
retVal.setResourceText(resourceText);
retVal.setChanged(changed);
return retVal;
}
/**
* helper for returning the encoded byte array of the input resource string based on the encoding.
*
* @param encoding the encoding to use
* @param encodedResource the resource to encode
* @return byte array of the resource
*/
@Nonnull
private byte[] getResourceBinary(ResourceEncodingEnum encoding, String encodedResource) {
byte[] resourceBinary;
switch (encoding) {
case JSON:
resourceBinary = encodedResource.getBytes(StandardCharsets.UTF_8);
break;
case JSONC:
resourceBinary = GZipUtil.compress(encodedResource);
break;
default:
case DEL:
case ESR:
resourceBinary = new byte[0];
break;
}
return resourceBinary;
}
/**
* helper to format the meta element for serialization of the resource.
*
@@ -1437,8 +1394,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
List<String> excludeElements = new ArrayList<>(8);
getExcludedElements(historyEntity.getResourceType(), excludeElements, theResource.getMeta());
String encodedResourceString = encodeResource(theResource, encoding, excludeElements, myContext);
byte[] resourceBinary = getResourceBinary(encoding, encodedResourceString);
boolean changed = !Arrays.equals(historyEntity.getResource(), resourceBinary);
boolean changed = !StringUtils.equals(historyEntity.getResourceTextVc(), encodedResourceString);
historyEntity.setUpdated(theTransactionDetails.getTransactionDate());
@@ -1450,19 +1406,14 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
return historyEntity;
}
if (getStorageSettings().getInlineResourceTextBelowSize() > 0
&& encodedResourceString.length() < getStorageSettings().getInlineResourceTextBelowSize()) {
populateEncodedResource(encodedResource, encodedResourceString, null, ResourceEncodingEnum.JSON);
} else {
populateEncodedResource(encodedResource, null, resourceBinary, encoding);
}
populateEncodedResource(encodedResource, encodedResourceString, ResourceEncodingEnum.JSON);
}
/*
* Save the resource itself to the resourceHistoryTable
*/
historyEntity = myEntityManager.merge(historyEntity);
historyEntity.setEncoding(encodedResource.getEncoding());
historyEntity.setResource(encodedResource.getResourceBinary());
historyEntity.setResourceTextVc(encodedResource.getResourceText());
myResourceHistoryTableDao.save(historyEntity);
@@ -1472,12 +1423,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}
private void populateEncodedResource(
EncodedResource encodedResource,
String encodedResourceString,
byte[] theResourceBinary,
ResourceEncodingEnum theEncoding) {
EncodedResource encodedResource, String encodedResourceString, ResourceEncodingEnum theEncoding) {
encodedResource.setResourceText(encodedResourceString);
encodedResource.setResourceBinary(theResourceBinary);
encodedResource.setEncoding(theEncoding);
}
@@ -1542,7 +1489,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}
historyEntry.setEncoding(theChanged.getEncoding());
historyEntry.setResource(theChanged.getResourceBinary());
historyEntry.setResourceTextVc(theChanged.getResourceText());
ourLog.debug("Saving history entry ID[{}] for RES_ID[{}]", historyEntry.getId(), historyEntry.getResourceId());

View File

@@ -1689,19 +1689,17 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
if (historyEntity.getEncoding() == ResourceEncodingEnum.JSONC
|| historyEntity.getEncoding() == ResourceEncodingEnum.JSON) {
byte[] resourceBytes = historyEntity.getResource();
// Always migrate data out of the bytes column
if (resourceBytes != null) {
String resourceText = decodeResource(resourceBytes, historyEntity.getEncoding());
if (myStorageSettings.getInlineResourceTextBelowSize() > 0
&& resourceText.length() < myStorageSettings.getInlineResourceTextBelowSize()) {
ourLog.debug(
"Storing text of resource {} version {} as inline VARCHAR",
entity.getResourceId(),
historyEntity.getVersion());
historyEntity.setResourceTextVc(resourceText);
historyEntity.setResource(null);
historyEntity.setEncoding(ResourceEncodingEnum.JSON);
changed = true;
}
ourLog.debug(
"Storing text of resource {} version {} as inline VARCHAR",
entity.getResourceId(),
historyEntity.getVersion());
historyEntity.setResourceTextVc(resourceText);
historyEntity.setEncoding(ResourceEncodingEnum.JSON);
changed = true;
}
}
if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) {
@@ -2071,6 +2069,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
});
}
@Override
public <PID extends IResourcePersistentId<?>> Stream<PID> searchForIdStream(
SearchParameterMap theParams,
RequestDetails theRequest,

View File

@@ -24,7 +24,6 @@ import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
class EncodedResource {
private boolean myChanged;
private byte[] myResource;
private ResourceEncodingEnum myEncoding;
private String myResourceText;
@@ -36,14 +35,6 @@
myEncoding = theEncoding;
}
public byte[] getResourceBinary() {
return myResource;
}
public void setResourceBinary(byte[] theResource) {
myResource = theResource;
}
public boolean isChanged() {
return myChanged;
}

View File

@@ -79,4 +79,16 @@ public interface IResourceHistoryTableDao extends JpaRepository<ResourceHistoryT
@Modifying
@Query("DELETE FROM ResourceHistoryTable t WHERE t.myId = :pid")
void deleteByPid(@Param("pid") Long theId);
/**
* This method is only for use in unit tests. It moves the stored resource body contents from the new
* <code>RES_TEXT_VC</code> column back to the legacy <code>RES_TEXT</code> column, which is where data may have
* been stored by versions of HAPI FHIR prior to 7.0.0.
*
* @since 7.0.0
*/
@Modifying
@Query(
"UPDATE ResourceHistoryTable r SET r.myResourceTextVc = null, r.myResource = :text, r.myEncoding = 'JSONC' WHERE r.myId = :pid")
void updateNonInlinedContents(@Param("text") byte[] theText, @Param("pid") long thePid);
}
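
For context, a sketch of how a test drives this query, mirroring the relocateResourceTextToCompressedColumn helper added to BaseJpaR4Test later in this diff (pid and version are illustrative values):

    // Inside a transaction: gzip the VARCHAR body and move it into the legacy LOB column
    ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(pid, version);
    byte[] contents = GZipUtil.compress(history.getResourceTextVc());
    myResourceHistoryTableDao.updateNonInlinedContents(contents, history.getId());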

View File

@@ -1555,11 +1555,12 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
Builder.BuilderWithTableName nrmlTable = version.onTable("HFJ_SPIDX_QUANTITY_NRML");
nrmlTable.addColumn("20210111.1", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT);
nrmlTable.addColumn("20210111.2", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY);
// The FK name is generated by Hibernate, so we have to use this name here
// Disabled - superseded by 20220304.33
nrmlTable
.addForeignKey("20210111.3", "FKRCJOVMUH5KC0O6FVBLE319PYV")
.toColumn("RES_ID")
.references("HFJ_RESOURCE", "RES_ID");
.references("HFJ_RESOURCE", "RES_ID")
.doNothing();
Builder.BuilderWithTableName quantityTable = version.onTable("HFJ_SPIDX_QUANTITY");
quantityTable

View File

@@ -0,0 +1,40 @@
package ca.uhn.fhir.jpa.entity;
import ca.uhn.fhir.util.ClasspathUtil;
import org.apache.commons.lang3.StringUtils;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
public class GeneratedSchemaTest {
/**
* Make sure that the RES_TEXT_VC column, which is supposed to be an unlimited-length
* string datatype, actually uses an appropriate datatype on the various databases
* we care about.
*/
@Test
public void testVerifyLongVarcharColumnDefinition() {
validateLongVarcharDatatype("cockroachdb.sql", "varchar(2147483647)");
validateLongVarcharDatatype("derby.sql", "clob");
validateLongVarcharDatatype("mysql.sql", "longtext");
validateLongVarcharDatatype("mariadb.sql", "longtext");
validateLongVarcharDatatype("h2.sql", "clob");
validateLongVarcharDatatype("postgres.sql", "text");
validateLongVarcharDatatype("oracle.sql", "clob");
validateLongVarcharDatatype("sqlserver.sql", "varchar(max)");
}
private static void validateLongVarcharDatatype(String schemaName, String expectedDatatype) {
String schema = ClasspathUtil.loadResource("ca/uhn/hapi/fhir/jpa/docs/database/" + schemaName);
String[] lines = StringUtils.split(schema, '\n');
String resTextVc = Arrays.stream(lines).filter(t -> t.contains("RES_TEXT_VC ")).findFirst().orElseThrow();
assertThat("Wrong type in " + schemaName, resTextVc, containsString("RES_TEXT_VC " + expectedDatatype));
}
}

View File

@@ -25,14 +25,15 @@ import ca.uhn.fhir.rest.api.Constants;
import jakarta.persistence.*;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.Length;
import org.hibernate.annotations.OptimisticLock;
import org.hibernate.type.SqlTypes;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import static org.apache.commons.lang3.StringUtils.defaultString;
@Entity
@Table(
name = ResourceHistoryTable.HFJ_RES_VER,
@@ -57,7 +58,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
public static final int ENCODING_COL_LENGTH = 5;
public static final String HFJ_RES_VER = "HFJ_RES_VER";
public static final int RES_TEXT_VC_MAX_LENGTH = 4000;
private static final long serialVersionUID = 1L;
@Id
@@ -86,13 +86,15 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
@OneToMany(mappedBy = "myResourceHistory", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
private Collection<ResourceHistoryTag> myTags;
/**
* Note: No setter for this field because it's only a legacy way of storing data now.
*/
@Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = true)
@Lob()
@OptimisticLock(excluded = true)
private byte[] myResource;
@Column(name = "RES_TEXT_VC", length = RES_TEXT_VC_MAX_LENGTH, nullable = true)
@JdbcTypeCode(SqlTypes.LONG32VARCHAR)
@Column(name = "RES_TEXT_VC", nullable = true, length = Length.LONG32)
@OptimisticLock(excluded = true)
private String myResourceTextVc;
@@ -153,7 +155,8 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
}
public void setResourceTextVc(String theResourceTextVc) {
myResourceTextVc = theResourceTextVc;
myResource = null;
myResourceTextVc = defaultString(theResourceTextVc);
}
public ResourceHistoryProvenanceEntity getProvenance() {
@@ -209,10 +212,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
return myResource;
}
public void setResource(byte[] theResource) {
myResource = theResource;
}
@Override
public Long getResourceId() {
return myResourceId;

View File

@@ -41,14 +41,12 @@ public class ConsumeFilesStepR4Test extends BasePartitioningR4Test {
public void before() throws Exception {
super.before();
myPartitionSettings.setPartitioningEnabled(false);
myStorageSettings.setInlineResourceTextBelowSize(10000);
}
@AfterEach
@Override
public void after() {
super.after();
myStorageSettings.setInlineResourceTextBelowSize(new JpaStorageSettings().getInlineResourceTextBelowSize());
}
@Test

View File

@@ -91,7 +91,6 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
myStorageSettings.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED);
myStorageSettings.setIndexOnContainedResources(new JpaStorageSettings().isIndexOnContainedResources());
myStorageSettings.setIndexOnContainedResourcesRecursively(new JpaStorageSettings().isIndexOnContainedResourcesRecursively());
myStorageSettings.setInlineResourceTextBelowSize(new JpaStorageSettings().getInlineResourceTextBelowSize());
}
@Test

View File

@@ -1,102 +1,61 @@
package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.dao.GZipUtil;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.r4.model.IdType;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.HistorySearchDateRangeParam;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.HashMap;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class FhirResourceDaoR4InlineResourceModeTest extends BaseJpaR4Test {
@BeforeEach
public void beforeSetDao() {
myStorageSettings.setInlineResourceTextBelowSize(5000);
}
@AfterEach
public void afterResetDao() {
myStorageSettings.setInlineResourceTextBelowSize(new JpaStorageSettings().getInlineResourceTextBelowSize());
}
@Test
public void testCreateWithInlineResourceTextStorage() {
Patient patient = new Patient();
patient.setActive(true);
Long resourceId = myPatientDao.create(patient).getId().getIdPartAsLong();
public void testRetrieveNonInlinedResource() {
IIdType id = createPatient(withActiveTrue());
Long pid = id.getIdPartAsLong();
patient = new Patient();
patient.setId("Patient/" + resourceId);
patient.setActive(false);
myPatientDao.update(patient);
relocateResourceTextToCompressedColumn(pid, 1L);
runInTransaction(() -> {
// Version 1
ResourceHistoryTable entity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(resourceId, 1);
assertNull(entity.getResource());
assertThat(entity.getResourceTextVc(), containsString("\"active\":true"));
// Version 2
entity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(resourceId, 2);
assertNull(entity.getResource());
assertThat(entity.getResourceTextVc(), containsString("\"active\":false"));
runInTransaction(()->{
ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(pid, 1);
assertNotNull(historyEntity.getResource());
assertNull(historyEntity.getResourceTextVc());
assertEquals(ResourceEncodingEnum.JSONC, historyEntity.getEncoding());
});
patient = myPatientDao.read(new IdType("Patient/" + resourceId));
assertFalse(patient.getActive());
// Read
validatePatient(myPatientDao.read(id.withVersion(null), mySrd));
patient = (Patient) myPatientDao.search(SearchParameterMap.newSynchronous()).getAllResources().get(0);
assertFalse(patient.getActive());
// VRead
validatePatient(myPatientDao.read(id.withVersion("1"), mySrd));
// Search (Sync)
validatePatient(myPatientDao.search(SearchParameterMap.newSynchronous(), mySrd).getResources(0, 1).get(0));
// Search (Async)
validatePatient(myPatientDao.search(new SearchParameterMap(), mySrd).getResources(0, 1).get(0));
// History
validatePatient(myPatientDao.history(id, new HistorySearchDateRangeParam(new HashMap<>(), new DateRangeParam(), 0), mySrd).getResources(0, 1).get(0));
}
@Test
public void testDontUseInlineAboveThreshold() {
String veryLongFamilyName = StringUtils.leftPad("", 6000, 'a');
Patient patient = new Patient();
patient.setActive(true);
patient.addName().setFamily(veryLongFamilyName);
Long resourceId = myPatientDao.create(patient).getId().getIdPartAsLong();
runInTransaction(() -> {
// Version 1
ResourceHistoryTable entity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(resourceId, 1);
assertNotNull(entity.getResource());
assertNull(entity.getResourceTextVc());
});
patient = myPatientDao.read(new IdType("Patient/" + resourceId));
assertEquals(veryLongFamilyName, patient.getNameFirstRep().getFamily());
}
@Test
public void testNopOnUnchangedUpdate() {
Patient patient = new Patient();
patient.setActive(true);
Long resourceId = myPatientDao.create(patient).getId().getIdPartAsLong();
patient = new Patient();
patient.setId("Patient/" + resourceId);
patient.setActive(true);
DaoMethodOutcome updateOutcome = myPatientDao.update(patient);
assertEquals("1", updateOutcome.getId().getVersionIdPart());
assertTrue(updateOutcome.isNop());
private void validatePatient(IBaseResource theRead) {
assertTrue(((Patient)theRead).getActive());
}

View File

@@ -1031,26 +1031,15 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
@ParameterizedTest
@CsvSource({
// NoOp OptimisticLock OptimizeMode ExpectedSelect ExpectedUpdate
" false, false, CURRENT_VERSION, 2, 1",
" true, false, CURRENT_VERSION, 2, 0",
" false, true, CURRENT_VERSION, 12, 1",
" true, true, CURRENT_VERSION, 12, 0",
" false, false, ALL_VERSIONS, 12, 10",
" true, false, ALL_VERSIONS, 12, 0",
" false, true, ALL_VERSIONS, 22, 10",
" true, true, ALL_VERSIONS, 22, 0",
// OptimisticLock OptimizeMode ExpectedSelect ExpectedUpdate
" false, CURRENT_VERSION, 2, 0",
" true, CURRENT_VERSION, 12, 0",
" false, ALL_VERSIONS, 12, 0",
" true, ALL_VERSIONS, 22, 0",
})
public void testReindexJob_OptimizeStorage(boolean theNoOp, boolean theOptimisticLock, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageModeEnum, int theExpectedSelectCount, int theExpectedUpdateCount) {
public void testReindexJob_OptimizeStorage(boolean theOptimisticLock, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageModeEnum, int theExpectedSelectCount, int theExpectedUpdateCount) {
// Setup
// In no-op mode, the inlining is already in the state it needs to be in
if (theNoOp) {
myStorageSettings.setInlineResourceTextBelowSize(10000);
} else {
myStorageSettings.setInlineResourceTextBelowSize(0);
}
ResourceIdListWorkChunkJson data = new ResourceIdListWorkChunkJson();
IIdType patientId = createPatient(withActiveTrue());
for (int i = 0; i < 10; i++) {

View File

@@ -274,7 +274,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
ResourceHistoryTable newHistory = table.toHistory(true);
ResourceHistoryTable currentHistory = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(table.getId(), 1L);
newHistory.setEncoding(currentHistory.getEncoding());
newHistory.setResource(currentHistory.getResource());
newHistory.setResourceTextVc(currentHistory.getResourceTextVc());
myResourceHistoryTableDao.save(newHistory);
});
@@ -2928,7 +2928,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
ResourceHistoryTable table = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 1L);
String newContent = myFhirContext.newJsonParser().encodeResourceToString(p);
newContent = newContent.replace("male", "foo");
table.setResource(newContent.getBytes(Charsets.UTF_8));
table.setResourceTextVc(newContent);
table.setEncoding(ResourceEncodingEnum.JSON);
myResourceHistoryTableDao.save(table);
}

View File

@@ -620,11 +620,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
template.execute((TransactionCallback<ResourceTable>) t -> {
ResourceHistoryTable resourceHistoryTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), id.getVersionIdPartAsLong());
resourceHistoryTable.setEncoding(ResourceEncodingEnum.JSON);
try {
resourceHistoryTable.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new Error(e);
}
resourceHistoryTable.setResourceTextVc("{\"resourceType\":\"FOO\"}");
myResourceHistoryTableDao.save(resourceHistoryTable);
ResourceTable table = myResourceTableDao.findById(id.getIdPartAsLong()).orElseThrow(IllegalStateException::new);
@@ -1917,11 +1913,11 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
Patient p = new Patient();
p.addIdentifier().setSystem("urn:system").setValue(methodName);
myPatientDao.create(p, mySrd).getId();
myPatientDao.create(p, mySrd);
p = new Patient();
p.addIdentifier().setSystem("urn:system").setValue(methodName);
myPatientDao.create(p, mySrd).getId();
myPatientDao.create(p, mySrd);
Observation o = new Observation();
o.getCode().setText("Some Observation");

View File

@@ -87,6 +87,11 @@ public class ReindexJobTest extends BaseJpaR4Test {
createPatient(withActiveTrue());
}
// Move resource text into the legacy compressed storage column. We no longer write to it,
// but legacy data may still exist there, so we simulate that here.
List<ResourceHistoryTable> allHistoryEntities = runInTransaction(() -> myResourceHistoryTableDao.findAll());
allHistoryEntities.forEach(t->relocateResourceTextToCompressedColumn(t.getResourceId(), t.getVersion()));
runInTransaction(()->{
assertEquals(20, myResourceHistoryTableDao.count());
for (ResourceHistoryTable history : myResourceHistoryTableDao.findAll()) {
@@ -141,6 +146,11 @@ public class ReindexJobTest extends BaseJpaR4Test {
createPatient(withActiveTrue());
}
// Move resource text into the legacy compressed storage column. We no longer write to it,
// but legacy data may still exist there, so we simulate that here.
List<ResourceHistoryTable> allHistoryEntities = runInTransaction(() -> myResourceHistoryTableDao.findAll());
allHistoryEntities.forEach(t->relocateResourceTextToCompressedColumn(t.getResourceId(), t.getVersion()));
runInTransaction(()->{
assertEquals(20, myResourceHistoryTableDao.count());
for (ResourceHistoryTable history : myResourceHistoryTableDao.findAll()) {
@@ -149,8 +159,6 @@ public class ReindexJobTest extends BaseJpaR4Test {
}
});
myStorageSettings.setInlineResourceTextBelowSize(10000);
// execute
JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX);

View File

@@ -38,11 +38,9 @@ public class ResourceProviderInvalidDataR4Test extends BaseResourceProviderR4Tes
// Manually set the value to be an invalid decimal number
runInTransaction(() -> {
ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id, 1);
byte[] bytesCompressed = resVer.getResource();
String resourceText = GZipUtil.decompress(bytesCompressed);
String resourceText = resVer.getResourceTextVc();
resourceText = resourceText.replace("100", "-.100");
bytesCompressed = GZipUtil.compress(resourceText);
resVer.setResource(bytesCompressed);
resVer.setResourceTextVc(resourceText);
myResourceHistoryTableDao.save(resVer);
});

View File

@@ -54,6 +54,7 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.util.ClasspathUtil;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.TestUtil;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
@@ -6723,7 +6724,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
// Update Patient after delay
int delayInMs = 1000;
TimeUnit.MILLISECONDS.sleep(delayInMs);
TestUtil.sleepAtLeast(delayInMs + 100);
patient.getNameFirstRep().addGiven("Bob");
myClient.update().resource(patient).execute();

View File

@@ -397,6 +397,11 @@ public class GiantTransactionPerfTest {
throw new UnsupportedOperationException();
}
@Override
public void updateNonInlinedContents(byte[] theText, long thePid) {
throw new UnsupportedOperationException();
}
@Nonnull
@Override
public List<ResourceHistoryTable> findAll() {

View File

@@ -34,6 +34,20 @@
<scope>test</scope>
</dependency>
<!-- Needed for Testcontainers -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.13.2</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-core</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<build>

View File

@@ -0,0 +1,148 @@
package ca.uhn.fhir.jpa.dao.r5.database;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.embedded.JpaEmbeddedDatabase;
import ca.uhn.fhir.jpa.migrate.HapiMigrationStorageSvc;
import ca.uhn.fhir.jpa.migrate.MigrationTaskList;
import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
import ca.uhn.fhir.jpa.test.config.TestR5Config;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.util.VersionEnum;
import jakarta.persistence.EntityManagerFactory;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r5.model.Patient;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.envers.repository.support.EnversRevisionRepositoryFactoryBean;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import javax.sql.DataSource;
import java.util.Properties;
import java.util.Set;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
@ExtendWith(SpringExtension.class)
@EnableJpaRepositories(repositoryFactoryBeanClass = EnversRevisionRepositoryFactoryBean.class)
@ContextConfiguration(classes = {BaseDatabaseVerificationIT.TestConfig.class})
public abstract class BaseDatabaseVerificationIT {
private static final Logger ourLog = LoggerFactory.getLogger(BaseDatabaseVerificationIT.class);
private static final String MIGRATION_TABLENAME = "MIGRATIONS";
@Autowired
EntityManagerFactory myEntityManagerFactory;
@Autowired
JpaEmbeddedDatabase myJpaEmbeddedDatabase;
@Autowired
IFhirResourceDao<Patient> myPatientDao;
@ParameterizedTest
@ValueSource(ints = {10, 100000})
public void testCreateRead(int theSize) {
String name = StringUtils.leftPad("", theSize, "a");
Patient patient = new Patient();
patient.setActive(true);
patient.addName().setFamily(name);
IIdType id = myPatientDao.create(patient, new SystemRequestDetails()).getId();
Patient actual = myPatientDao.read(id, new SystemRequestDetails());
assertEquals(name, actual.getName().get(0).getFamily());
}
@Test
public void testDelete() {
Patient patient = new Patient();
patient.setActive(true);
IIdType id = myPatientDao.create(patient, new SystemRequestDetails()).getId().toUnqualifiedVersionless();
myPatientDao.delete(id, new SystemRequestDetails());
assertThrows(ResourceGoneException.class, () -> myPatientDao.read(id, new SystemRequestDetails()));
}
@Configuration
public static class TestConfig extends TestR5Config {
@Autowired
private JpaDatabaseContextConfigParamObject myJpaDatabaseContextConfigParamObject;
@Override
@Bean
public DataSource dataSource() {
DataSource dataSource = myJpaDatabaseContextConfigParamObject.getJpaEmbeddedDatabase().getDataSource();
HapiMigrationDao hapiMigrationDao = new HapiMigrationDao(dataSource, myJpaDatabaseContextConfigParamObject.getJpaEmbeddedDatabase().getDriverType(), MIGRATION_TABLENAME);
HapiMigrationStorageSvc hapiMigrationStorageSvc = new HapiMigrationStorageSvc(hapiMigrationDao);
MigrationTaskList tasks = new HapiFhirJpaMigrationTasks(Set.of()).getAllTasks(VersionEnum.values());
SchemaMigrator schemaMigrator = new SchemaMigrator(
"HAPI FHIR", MIGRATION_TABLENAME, dataSource, new Properties(), tasks, hapiMigrationStorageSvc);
schemaMigrator.setDriverType(myJpaDatabaseContextConfigParamObject.getJpaEmbeddedDatabase().getDriverType());
ourLog.info("About to run migration...");
schemaMigrator.createMigrationTableIfRequired();
schemaMigrator.migrate();
ourLog.info("Migration complete");
return dataSource;
}
@Bean
public JpaEmbeddedDatabase jpaEmbeddedDatabase(JpaDatabaseContextConfigParamObject theJpaDatabaseContextConfigParamObject) {
return theJpaDatabaseContextConfigParamObject.getJpaEmbeddedDatabase();
}
@Override
protected Properties jpaProperties() {
Properties retVal = super.jpaProperties();
retVal.put("hibernate.hbm2ddl.auto", "none");
retVal.put("hibernate.dialect", myJpaDatabaseContextConfigParamObject.getDialect());
return retVal;
}
}
public static class JpaDatabaseContextConfigParamObject {
private JpaEmbeddedDatabase myJpaEmbeddedDatabase;
private String myDialect;
public JpaDatabaseContextConfigParamObject(JpaEmbeddedDatabase theJpaEmbeddedDatabase, String theDialect) {
myJpaEmbeddedDatabase = theJpaEmbeddedDatabase;
myDialect = theDialect;
}
public JpaEmbeddedDatabase getJpaEmbeddedDatabase() {
return myJpaEmbeddedDatabase;
}
public String getDialect() {
return myDialect;
}
}
}

View File

@@ -0,0 +1,27 @@
package ca.uhn.fhir.jpa.dao.r5.database;
import ca.uhn.fhir.jpa.embedded.MsSqlEmbeddedDatabase;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirSQLServerDialect;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;
@ContextConfiguration(classes = {
DatabaseVerificationWithMsSqlIT.TestConfig.class
})
public class DatabaseVerificationWithMsSqlIT extends BaseDatabaseVerificationIT {
@Configuration
public static class TestConfig {
@Bean
public JpaDatabaseContextConfigParamObject jpaDatabaseParamObject() {
return new JpaDatabaseContextConfigParamObject(
new MsSqlEmbeddedDatabase(),
HapiFhirSQLServerDialect.class.getName()
);
}
}
}

View File

@@ -0,0 +1,26 @@
package ca.uhn.fhir.jpa.dao.r5.database;
import ca.uhn.fhir.jpa.embedded.OracleEmbeddedDatabase;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirOracleDialect;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;
@ContextConfiguration(classes = {
DatabaseVerificationWithOracleIT.TestConfig.class
})
public class DatabaseVerificationWithOracleIT extends BaseDatabaseVerificationIT {
@Configuration
public static class TestConfig {
@Bean
public JpaDatabaseContextConfigParamObject jpaDatabaseParamObject(){
return new JpaDatabaseContextConfigParamObject(
new OracleEmbeddedDatabase(),
HapiFhirOracleDialect.class.getName()
);
}
}
}

View File

@@ -0,0 +1,26 @@
package ca.uhn.fhir.jpa.dao.r5.database;
import ca.uhn.fhir.jpa.embedded.PostgresEmbeddedDatabase;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;
@ContextConfiguration(classes = {
DatabaseVerificationWithPostgresIT.TestConfig.class
})
public class DatabaseVerificationWithPostgresIT extends BaseDatabaseVerificationIT {
@Configuration
public static class TestConfig {
@Bean
public JpaDatabaseContextConfigParamObject jpaDatabaseParamObject() {
return new JpaDatabaseContextConfigParamObject(
new PostgresEmbeddedDatabase(),
HapiFhirPostgresDialect.class.getName()
);
}
}
}

View File

@@ -131,20 +131,14 @@
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>postgresql</artifactId>
<version>1.17.6</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>mssqlserver</artifactId>
<version>1.17.6</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>oracle-xe</artifactId>
<version>1.17.6</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>

View File

@@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.embedded;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import jakarta.annotation.PreDestroy;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -53,6 +54,7 @@ public abstract class JpaEmbeddedDatabase {
private JdbcTemplate myJdbcTemplate;
private Connection myConnection;
@PreDestroy
public abstract void stop();
public abstract void disableConstraints();
@@ -116,7 +118,7 @@ public abstract class JpaEmbeddedDatabase {
for (String sql : theStatements) {
if (!StringUtils.isBlank(sql)) {
statement.addBatch(sql);
ourLog.info("Added to batch: {}", sql);
ourLog.debug("Added to batch: {}", sql);
}
}
statement.executeBatch();

View File

@@ -40,6 +40,7 @@ import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor;
import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
import ca.uhn.fhir.jpa.dao.GZipUtil;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
@@ -82,6 +83,7 @@ import ca.uhn.fhir.jpa.entity.TermValueSet;
import ca.uhn.fhir.jpa.entity.TermValueSetConcept;
import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.packages.IPackageInstallerSvc;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
import ca.uhn.fhir.jpa.provider.JpaSystemProvider;
@@ -663,6 +665,14 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
return myTxManager;
}
protected void relocateResourceTextToCompressedColumn(Long theResourcePid, Long theVersion) {
runInTransaction(()->{
ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theResourcePid, theVersion);
byte[] contents = GZipUtil.compress(historyEntity.getResourceTextVc());
myResourceHistoryTableDao.updateNonInlinedContents(contents, historyEntity.getId());
});
}
protected ValidationResult validateWithResult(IBaseResource theResource) {
FhirValidator validatorModule = myFhirContext.newValidator();
FhirInstanceValidator instanceValidator = new FhirInstanceValidator(myValidationSupport);

View File

@@ -186,7 +186,7 @@ public class TestR5Config {
return retVal;
}
private Properties jpaProperties() {
protected Properties jpaProperties() {
Properties extraProperties = new Properties();
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");

View File

@@ -34,7 +34,7 @@ public enum ColumnTypeEnum {
/**
* Unlimited length text, with a column definition containing the annotation:
* <code>@JdbcTypeCode(SqlTypes.LONG32VARCHAR)</code>
* <code>@Column(length=Integer.MAX_VALUE)</code>
*/
TEXT,
BIG_DECIMAL;
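
For reference, the RES_TEXT_VC mapping earlier in this diff is a concrete use of this column type (org.hibernate.Length.LONG32 equals Integer.MAX_VALUE):

    // Renders as "text" on Postgres, "clob" on H2/Oracle/Derby, "longtext" on
    // MySQL/MariaDB, and "varchar(max)" on SQL Server, matching the expectations
    // asserted in GeneratedSchemaTest above.
    @Column(name = "RES_TEXT_VC", nullable = true, length = Length.LONG32)
    @OptimisticLock(excluded = true)
    private String myResourceTextVc;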

View File

@@ -62,7 +62,7 @@ public final class ColumnTypeToDriverTypeToSqlType {
setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.MYSQL_5_7, "double precision");
setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.MSSQL_2012, "double precision");
setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.ORACLE_12C, "double precision");
setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.POSTGRES_9_4, "float8");
setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.POSTGRES_9_4, "double precision");
setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.H2_EMBEDDED, "bigint");
setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.DERBY_EMBEDDED, "bigint");
@@ -123,7 +123,7 @@ public final class ColumnTypeToDriverTypeToSqlType {
"oid"); // the PG driver will write oid into a `text` column
setColumnType(ColumnTypeEnum.CLOB, DriverTypeEnum.MSSQL_2012, "varchar(MAX)");
setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.H2_EMBEDDED, "character large object");
setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.H2_EMBEDDED, "clob");
setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.DERBY_EMBEDDED, "clob");
setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.MARIADB_10_1, "longtext");
setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.MYSQL_5_7, "longtext");

View File

@@ -267,14 +267,6 @@ public class JpaStorageSettings extends StorageSettings {
* @since 5.6.0
*/
private boolean myAdvancedHSearchIndexing = false;
/**
* If set to a positive number, any resources with a character length at or below the given number
* of characters will be stored inline in the <code>HFJ_RES_VER</code> table instead of using a
* separate LOB column.
*
* @since 5.7.0
*/
private int myInlineResourceTextBelowSize = 0;
/**
* @since 5.7.0
@@ -381,25 +373,21 @@
}
/**
* If set to a positive number, any resources with a character length at or below the given number
* of characters will be stored inline in the <code>HFJ_RES_VER</code> table instead of using a
* separate LOB column.
*
* @since 5.7.0
* @deprecated This setting no longer does anything as of HAPI FHIR 7.0.0
*/
@Deprecated
public int getInlineResourceTextBelowSize() {
return myInlineResourceTextBelowSize;
return 0;
}
/**
* If set to a positive number, any resources with a character length at or below the given number
* of characters will be stored inline in the <code>HFJ_RES_VER</code> table instead of using a
* separate LOB column.
*
* @since 5.7.0
* @deprecated This setting no longer does anything as of HAPI FHIR 7.0.0
*/
@Deprecated
public void setInlineResourceTextBelowSize(int theInlineResourceTextBelowSize) {
myInlineResourceTextBelowSize = theInlineResourceTextBelowSize;
// ignored
}
/**

View File

@@ -361,9 +361,6 @@ public class JpaModelScannerAndVerifier {
if (!theIsView && column.length() == 255) {
throw new IllegalStateException(Msg.code(1626) + "Field does not have an explicit maximum length specified: " + field);
}
if (column.length() > MAX_COL_LENGTH) {
throw new IllegalStateException(Msg.code(1627) + "Field is too long: " + field);
}
}
Size size = theAnnotatedElement.getAnnotation(Size.class);

View File

@@ -184,14 +184,6 @@
<dependency>
<groupId>org.apache.maven</groupId>
<artifactId>maven-plugin-api</artifactId>
<!--
<exclusions>
<exclusion>
<groupId>org.eclipse.sisu</groupId>
<artifactId>org.eclipse.sisu.plexus</artifactId>
</exclusion>
</exclusions>
-->
</dependency>
<dependency>
<groupId>org.apache.maven.plugin-tools</groupId>

View File

@@ -1,6 +1,7 @@
package ca.uhn.fhir.tinder.ddl;
import ca.uhn.fhir.jpa.util.ISequenceValueMassager;
import ca.uhn.fhir.util.IoUtil;
import jakarta.annotation.Nonnull;
import jakarta.persistence.Entity;
import jakarta.persistence.MappedSuperclass;
@@ -29,6 +30,7 @@ import org.springframework.core.io.ResourceLoader;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.type.filter.AnnotationTypeFilter;
import java.io.Closeable;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
@@ -125,19 +127,8 @@ public class DdlGeneratorHibernate61 {
writeContentsToFile(nextDialect.getAppendFile(), classLoader, outputFile);
}
}
private static void writeContentsToFile(String prependFile, ClassLoader classLoader, File outputFile)
throws MojoFailureException {
if (isNotBlank(prependFile)) {
ResourceLoader loader = new DefaultResourceLoader(classLoader);
Resource resource = loader.getResource(prependFile);
try (Writer w = new FileWriter(outputFile, true)) {
w.append(resource.getContentAsString(StandardCharsets.UTF_8));
} catch (IOException e) {
throw new MojoFailureException("Failed to write to file " + outputFile + ": " + e.getMessage(), e);
}
}
IoUtil.closeQuietly(connectionProvider);
}
public void setProject(MavenProject theProject) {
@@ -204,18 +195,64 @@
* here. The schema export doesn't actually touch this DB, so it doesn't
* matter that it doesn't correlate to the specified dialect.
*/
private static class FakeConnectionConnectionProvider extends UserSuppliedConnectionProviderImpl {
private static class FakeConnectionConnectionProvider extends UserSuppliedConnectionProviderImpl
implements Closeable {
private static final long serialVersionUID = 4147495169899817244L;
private Connection connection;
@Override
public Connection getConnection() throws SQLException {
ourLog.trace("Using internal driver: {}", org.h2.Driver.class);
return DriverManager.getConnection("jdbc:h2:mem:tmp", "sa", "sa");
public FakeConnectionConnectionProvider() {
try {
connection = DriverManager.getConnection("jdbc:h2:mem:tmp", "sa", "sa");
} catch (SQLException e) {
connection = null;
return;
}
/*
* The Oracle Dialect tries to query for any existing sequences, so we need to supply
* a fake empty table to answer that query.
*/
try {
connection.setAutoCommit(true);
connection
.prepareStatement("create table all_sequences (PID bigint not null, primary key (PID))")
.execute();
} catch (SQLException e) {
ourLog.error("Failed to create sequences table", e);
}
}
@Override
public void closeConnection(Connection conn) throws SQLException {
conn.close();
public Connection getConnection() {
ourLog.trace("Using internal driver: {}", org.h2.Driver.class);
return connection;
}
@Override
public void closeConnection(Connection conn) {
// ignore
}
@Override
public void close() throws IOException {
try {
connection.close();
} catch (SQLException e) {
throw new IOException(e);
}
}
}
private static void writeContentsToFile(String prependFile, ClassLoader classLoader, File outputFile)
throws MojoFailureException {
if (isNotBlank(prependFile)) {
ResourceLoader loader = new DefaultResourceLoader(classLoader);
Resource resource = loader.getResource(prependFile);
try (Writer w = new FileWriter(outputFile, true)) {
w.append(resource.getContentAsString(StandardCharsets.UTF_8));
} catch (IOException e) {
throw new MojoFailureException("Failed to write to file " + outputFile + ": " + e.getMessage(), e);
}
}
}
}

View File

@@ -28,13 +28,13 @@ public class GenerateDdlMojo extends AbstractMojo {
private static final Logger ourLog = LoggerFactory.getLogger(GenerateDdlMojo.class);
@Parameter
private List<String> packageNames;
List<String> packageNames;
@Parameter
private List<Dialect> dialects;
List<Dialect> dialects;
@Parameter
private String outputDirectory;
String outputDirectory;
@Parameter(defaultValue = "${project}", readonly = true)
private transient MavenProject project;
@@ -70,18 +70,20 @@ public class GenerateDdlMojo extends AbstractMojo {
public static void main(String[] args) throws MojoExecutionException, MojoFailureException {
/*
* Note, to execute this, add the following snippet to this module's POM. The whole project won't work with
* Note, to execute this for real entities, add the following snippet to this module's POM. The whole project won't work with
* that added, but you can add it temporarily in order to debug this in IJ:
* <dependency>
* <groupId>ca.uhn.hapi.fhir</groupId>
* <artifactId>hapi-fhir-jpaserver-model</artifactId>
* <version>${project.version}</version>
* </dependency>
*
* Alternatively, there is a unit test with fake entities that also runs this class.
*/
GenerateDdlMojo m = new GenerateDdlMojo();
m.packageNames = List.of("ca.uhn.fhir.jpa.model.entity");
m.outputDirectory = "hapi-tinder-plugin/target";
m.dialects = List.of(new Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect", "h2.sql"));
m.dialects = List.of(new Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect", "postgres.sql"));
m.execute();
}

View File

@@ -0,0 +1,46 @@
package ca.uhn.fhir.tinder.ddl;
import org.apache.commons.io.FileUtils;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.junit.jupiter.api.Test;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Locale;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
class GenerateDdlMojoTest {
@Test
public void testGenerateSequences() throws MojoExecutionException, MojoFailureException, IOException {
GenerateDdlMojo m = new GenerateDdlMojo();
m.packageNames = List.of("ca.uhn.fhir.tinder.ddl.test");
m.outputDirectory = "target/generate-ddl-plugin-test/";
m.dialects = List.of(
new GenerateDdlMojo.Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect", "h2.sql"),
new GenerateDdlMojo.Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect", "postgres.sql"),
new GenerateDdlMojo.Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirOracleDialect", "oracle.sql"),
new GenerateDdlMojo.Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirSQLServerDialect", "sqlserver.sql")
);
m.execute();
verifySequence("sqlserver.sql");
verifySequence("oracle.sql");
verifySequence("postgres.sql");
verifySequence("h2.sql");
}
private static void verifySequence(String fileName) throws IOException {
String contents = FileUtils.readFileToString(new File("target/generate-ddl-plugin-test/" + fileName), StandardCharsets.UTF_8).toUpperCase(Locale.ROOT);
assertThat(fileName, contents, containsString("CREATE SEQUENCE"));
}
}

View File

@@ -0,0 +1,25 @@
package ca.uhn.fhir.tinder.ddl.test;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
@Table()
@Entity()
public class ExampleEntity {
@Id
@SequenceGenerator(name = "SEQ_RESOURCE_HISTORY_ID", sequenceName = "SEQ_RESOURCE_HISTORY_ID")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_RESOURCE_HISTORY_ID")
@Column(name = "PID")
private Long myId;
@Column(name = "RES_ID", nullable = false, updatable = false, insertable = false)
private Long myResourceId;
}

pom.xml
View File

@@ -2188,7 +2188,21 @@
<groupId>org.testcontainers</groupId>
<artifactId>testcontainers</artifactId>
<version>${testcontainers_version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>postgresql</artifactId>
<version>${testcontainers_version}</version>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>mssqlserver</artifactId>
<version>${testcontainers_version}</version>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>oracle-xe</artifactId>
<version>${testcontainers_version}</version>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
@@ -2343,7 +2357,7 @@
<maxmem>2000m</maxmem>
<compilerArgs>
<arg>-XDcompilePolicy=simple</arg>
<arg>-Xplugin:ErrorProne -Xep:MissingSummary:OFF</arg>
<arg>-Xplugin:ErrorProne -Xep:MissingSummary:OFF -XepExcludedPaths:.*/src/test/java/.*</arg>
<arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED</arg>
<arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED</arg>
<arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED</arg>