Fix postgres / Remove use of LOB content column (#5555)

* Work on fixing postgres

* Test fix

* wip

* wip

* wip

* wip

* wip

* adding support for h2 embedded and renaming postgressIT

* Work on postgres

* Remove use of gzip content on postgres

* Cleanup

* Test fixes

* Spotless

* Restore fake db for DDL generator

---------

Co-authored-by: peartree <etienne.poirier@smilecdr.com>
James Agnew 2023-12-15 08:07:26 -05:00 committed by GitHub
parent d187399ce5
commit 7863f03c68
51 changed files with 572 additions and 272 deletions


@@ -0,0 +1,10 @@
+---
+type: perf
+issue: 5555
+title: "Previously, resource body content went into one of two columns on the HFJ_RES_VER table:
+  RES_TEXT if the size was above a configurable threshold, or RES_TEXT_VC if it was below that
+  threshold. Performance testing has shown that the latter is always faster, and that on
+  Postgres the use of the former is particularly problematic, since it maps to the
+  largeobject table, which is not the recommended way of storing high-frequency objects.
+  The configurable threshold is now ignored, and the RES_TEXT_VC column is always used. Any
+  legacy data in the RES_TEXT column will still be read, however."
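For context, the mapping change this entry describes appears in the ResourceHistoryTable diff later in this commit. A minimal sketch of the two columns after the change, with RES_TEXT kept only for reading legacy rows and RES_TEXT_VC now always written:

    // Legacy LOB column: still readable for old data, but no longer written
    // (the public setter is removed in this commit).
    @Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = true)
    @Lob()
    @OptimisticLock(excluded = true)
    private byte[] myResource;

    // Unlimited-length character column, used for all writes from now on. On
    // Postgres this maps to "text" rather than the largeobject table.
    @Column(name = "RES_TEXT_VC", nullable = true, length = Length.LONG32)
    @OptimisticLock(excluded = true)
    private String myResourceTextVc;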


@@ -19,7 +19,6 @@
  */
 package ca.uhn.fhir.jpa.model.dialect;

-import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import org.hibernate.dialect.DatabaseVersion;
 import org.hibernate.dialect.H2Dialect;
@@ -38,7 +37,7 @@ public class HapiFhirH2Dialect extends H2Dialect {
	/**
	 * As of Hibernate 6, generated schemas include a column level check constraint that enforces valid values
-	 * for columns that back an Enum type. For example, the column definition for {@link ResourceTable#getFhirVersion()}
+	 * for columns that back an Enum type. For example, the column definition for <code>ResourceTable#getFhirVersion()</code>
	 * would look like:
	 * <pre>
	 * RES_VERSION varchar(7) check (RES_VERSION in ('DSTU2','DSTU2_HL7ORG','DSTU2_1','DSTU3','R4','R4B','R5')),


@@ -48,8 +48,10 @@ public final class HapiEntityManagerFactoryUtil {
			ConfigurableListableBeanFactory myConfigurableListableBeanFactory,
			FhirContext theFhirContext,
			JpaStorageSettings theStorageSettings) {

		LocalContainerEntityManagerFactoryBean retVal =
				new HapiFhirLocalContainerEntityManagerFactoryBean(myConfigurableListableBeanFactory);

		configureEntityManagerFactory(retVal, theFhirContext, theStorageSettings);
		return retVal;
	}


@@ -148,9 +148,7 @@ import org.springframework.transaction.support.TransactionSynchronization;
 import org.springframework.transaction.support.TransactionSynchronizationManager;
 import org.springframework.transaction.support.TransactionTemplate;

-import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Date;
@@ -645,7 +643,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
			theEntity.setResourceType(toResourceName(theResource));
		}

-		byte[] resourceBinary;
		String resourceText;
		ResourceEncodingEnum encoding;
		boolean changed = false;
@@ -662,7 +659,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
			if (address != null) {
				encoding = ResourceEncodingEnum.ESR;
-				resourceBinary = null;
				resourceText = address.getProviderId() + ":" + address.getLocation();
				changed = true;
@@ -680,19 +676,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
			theEntity.setFhirVersion(myContext.getVersion().getVersion());

			HashFunction sha256 = Hashing.sha256();
-			HashCode hashCode;
-			String encodedResource = encodeResource(theResource, encoding, excludeElements, myContext);
-			if (myStorageSettings.getInlineResourceTextBelowSize() > 0
-					&& encodedResource.length() < myStorageSettings.getInlineResourceTextBelowSize()) {
-				resourceText = encodedResource;
-				resourceBinary = null;
-				encoding = ResourceEncodingEnum.JSON;
-				hashCode = sha256.hashUnencodedChars(encodedResource);
-			} else {
-				resourceText = null;
-				resourceBinary = getResourceBinary(encoding, encodedResource);
-				hashCode = sha256.hashBytes(resourceBinary);
-			}
+			resourceText = encodeResource(theResource, encoding, excludeElements, myContext);
+			encoding = ResourceEncodingEnum.JSON;
+			HashCode hashCode = sha256.hashUnencodedChars(resourceText);
			String hashSha256 = hashCode.toString();
			if (!hashSha256.equals(theEntity.getHashSha256())) {
@@ -710,7 +696,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
		} else {
			encoding = null;
-			resourceBinary = null;
			resourceText = null;
		}
@@ -728,7 +713,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
				changed = true;
			}

-			resourceBinary = null;
			resourceText = null;
			encoding = ResourceEncodingEnum.DEL;
		}
@@ -753,46 +737,19 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
				if (currentHistoryVersion == null || !currentHistoryVersion.hasResource()) {
					changed = true;
				} else {
-					changed = !Arrays.equals(currentHistoryVersion.getResource(), resourceBinary);
+					changed = !StringUtils.equals(currentHistoryVersion.getResourceTextVc(), resourceText);
				}
			}
		}

		EncodedResource retVal = new EncodedResource();
		retVal.setEncoding(encoding);
-		retVal.setResourceBinary(resourceBinary);
		retVal.setResourceText(resourceText);
		retVal.setChanged(changed);

		return retVal;
	}

-	/**
-	 * helper for returning the encoded byte array of the input resource string based on the encoding.
-	 *
-	 * @param encoding the encoding to used
-	 * @param encodedResource the resource to encode
-	 * @return byte array of the resource
-	 */
-	@Nonnull
-	private byte[] getResourceBinary(ResourceEncodingEnum encoding, String encodedResource) {
-		byte[] resourceBinary;
-		switch (encoding) {
-			case JSON:
-				resourceBinary = encodedResource.getBytes(StandardCharsets.UTF_8);
-				break;
-			case JSONC:
-				resourceBinary = GZipUtil.compress(encodedResource);
-				break;
-			default:
-			case DEL:
-			case ESR:
-				resourceBinary = new byte[0];
-				break;
-		}
-		return resourceBinary;
-	}

	/**
	 * helper to format the meta element for serialization of the resource.
	 *
@@ -1437,8 +1394,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
		List<String> excludeElements = new ArrayList<>(8);
		getExcludedElements(historyEntity.getResourceType(), excludeElements, theResource.getMeta());
		String encodedResourceString = encodeResource(theResource, encoding, excludeElements, myContext);
-		byte[] resourceBinary = getResourceBinary(encoding, encodedResourceString);
-		boolean changed = !Arrays.equals(historyEntity.getResource(), resourceBinary);
+		boolean changed = !StringUtils.equals(historyEntity.getResourceTextVc(), encodedResourceString);

		historyEntity.setUpdated(theTransactionDetails.getTransactionDate());
@@ -1450,19 +1406,14 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
				return historyEntity;
			}

-			if (getStorageSettings().getInlineResourceTextBelowSize() > 0
-					&& encodedResourceString.length() < getStorageSettings().getInlineResourceTextBelowSize()) {
-				populateEncodedResource(encodedResource, encodedResourceString, null, ResourceEncodingEnum.JSON);
-			} else {
-				populateEncodedResource(encodedResource, null, resourceBinary, encoding);
-			}
+			populateEncodedResource(encodedResource, encodedResourceString, ResourceEncodingEnum.JSON);
		}

		/*
		 * Save the resource itself to the resourceHistoryTable
		 */
		historyEntity = myEntityManager.merge(historyEntity);
		historyEntity.setEncoding(encodedResource.getEncoding());
-		historyEntity.setResource(encodedResource.getResourceBinary());
		historyEntity.setResourceTextVc(encodedResource.getResourceText());
		myResourceHistoryTableDao.save(historyEntity);
@@ -1472,12 +1423,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
	}

	private void populateEncodedResource(
-			EncodedResource encodedResource,
-			String encodedResourceString,
-			byte[] theResourceBinary,
-			ResourceEncodingEnum theEncoding) {
+			EncodedResource encodedResource, String encodedResourceString, ResourceEncodingEnum theEncoding) {
		encodedResource.setResourceText(encodedResourceString);
-		encodedResource.setResourceBinary(theResourceBinary);
		encodedResource.setEncoding(theEncoding);
	}
@@ -1542,7 +1489,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
		}

		historyEntry.setEncoding(theChanged.getEncoding());
-		historyEntry.setResource(theChanged.getResourceBinary());
		historyEntry.setResourceTextVc(theChanged.getResourceText());

		ourLog.debug("Saving history entry ID[{}] for RES_ID[{}]", historyEntry.getId(), historyEntry.getResourceId());


@@ -1689,19 +1689,17 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
		if (historyEntity.getEncoding() == ResourceEncodingEnum.JSONC
				|| historyEntity.getEncoding() == ResourceEncodingEnum.JSON) {
			byte[] resourceBytes = historyEntity.getResource();
+
+			// Always migrate data out of the bytes column
			if (resourceBytes != null) {
				String resourceText = decodeResource(resourceBytes, historyEntity.getEncoding());
-				if (myStorageSettings.getInlineResourceTextBelowSize() > 0
-						&& resourceText.length() < myStorageSettings.getInlineResourceTextBelowSize()) {
-					ourLog.debug(
-							"Storing text of resource {} version {} as inline VARCHAR",
-							entity.getResourceId(),
-							historyEntity.getVersion());
-					historyEntity.setResourceTextVc(resourceText);
-					historyEntity.setResource(null);
-					historyEntity.setEncoding(ResourceEncodingEnum.JSON);
-					changed = true;
-				}
+				ourLog.debug(
+						"Storing text of resource {} version {} as inline VARCHAR",
+						entity.getResourceId(),
+						historyEntity.getVersion());
+				historyEntity.setResourceTextVc(resourceText);
+				historyEntity.setEncoding(ResourceEncodingEnum.JSON);
+				changed = true;
			}
		}
		if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) {
@@ -2071,6 +2069,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
		});
	}

+	@Override
	public <PID extends IResourcePersistentId<?>> Stream<PID> searchForIdStream(
			SearchParameterMap theParams,
			RequestDetails theRequest,


@@ -24,7 +24,6 @@ import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
 class EncodedResource {

	private boolean myChanged;
-	private byte[] myResource;
	private ResourceEncodingEnum myEncoding;
	private String myResourceText;
@@ -36,14 +35,6 @@
		myEncoding = theEncoding;
	}

-	public byte[] getResourceBinary() {
-		return myResource;
-	}
-
-	public void setResourceBinary(byte[] theResource) {
-		myResource = theResource;
-	}
-
	public boolean isChanged() {
		return myChanged;
	}


@@ -79,4 +79,16 @@ public interface IResourceHistoryTableDao extends JpaRepository<ResourceHistoryT
	@Modifying
	@Query("DELETE FROM ResourceHistoryTable t WHERE t.myId = :pid")
	void deleteByPid(@Param("pid") Long theId);
+
+	/**
+	 * This method is only for use in unit tests - It is used to move the stored resource body contents from the new
+	 * <code>RES_TEXT_VC</code> column to the legacy <code>RES_TEXT</code> column, which is where data may have
+	 * been stored by versions of HAPI FHIR prior to 7.0.0
+	 *
+	 * @since 7.0.0
+	 */
+	@Modifying
+	@Query(
+			"UPDATE ResourceHistoryTable r SET r.myResourceTextVc = null, r.myResource = :text, r.myEncoding = 'JSONC' WHERE r.myId = :pid")
+	void updateNonInlinedContents(@Param("text") byte[] theText, @Param("pid") long thePid);
 }
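As a usage sketch, mirroring the relocateResourceTextToCompressedColumn() helper added to BaseJpaR4Test later in this commit: a test gzip-compresses the inline text (matching the 'JSONC' encoding the query writes) and pushes it back into the legacy column:

    // Simulate pre-7.0.0 storage for one resource version:
    byte[] contents = GZipUtil.compress(historyEntity.getResourceTextVc());
    myResourceHistoryTableDao.updateNonInlinedContents(contents, historyEntity.getId());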


@@ -1555,11 +1555,12 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
		Builder.BuilderWithTableName nrmlTable = version.onTable("HFJ_SPIDX_QUANTITY_NRML");
		nrmlTable.addColumn("20210111.1", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT);
		nrmlTable.addColumn("20210111.2", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY);
-		// - The fk name is generated from Hibernate, have to use this name here
+		// Disabled - superceded by 20220304.33
		nrmlTable
				.addForeignKey("20210111.3", "FKRCJOVMUH5KC0O6FVBLE319PYV")
				.toColumn("RES_ID")
-				.references("HFJ_RESOURCE", "RES_ID");
+				.references("HFJ_RESOURCE", "RES_ID")
+				.doNothing();

		Builder.BuilderWithTableName quantityTable = version.onTable("HFJ_SPIDX_QUANTITY");
		quantityTable


@@ -0,0 +1,40 @@
+package ca.uhn.fhir.jpa.entity;
+
+import ca.uhn.fhir.util.ClasspathUtil;
+import org.apache.commons.lang3.StringUtils;
+import org.junit.jupiter.api.Test;
+
+import java.util.Arrays;
+
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+public class GeneratedSchemaTest {
+
+	/**
+	 * Make sure that the RES_TEXT_VC column, which is supposed to be an unlimited-length
+	 * string datatype, actually uses an appropriate datatype on the various databases
+	 * we care about.
+	 */
+	@Test
+	public void testVerifyLongVarcharColumnDefinition() {
+		validateLongVarcharDatatype("cockroachdb.sql", "varchar(2147483647)");
+		validateLongVarcharDatatype("derby.sql", "clob");
+		validateLongVarcharDatatype("mysql.sql", "longtext");
+		validateLongVarcharDatatype("mariadb.sql", "longtext");
+		validateLongVarcharDatatype("h2.sql", "clob");
+		validateLongVarcharDatatype("postgres.sql", "text");
+		validateLongVarcharDatatype("oracle.sql", "clob");
+		validateLongVarcharDatatype("sqlserver.sql", "varchar(max)");
+	}
+
+	private static void validateLongVarcharDatatype(String schemaName, String expectedDatatype) {
+		String schema = ClasspathUtil.loadResource("ca/uhn/hapi/fhir/jpa/docs/database/" + schemaName);
+		String[] lines = StringUtils.split(schema, '\n');
+		String resTextVc = Arrays.stream(lines).filter(t -> t.contains("RES_TEXT_VC ")).findFirst().orElseThrow();
+		assertThat("Wrong type in " + schemaName, resTextVc, containsString("RES_TEXT_VC " + expectedDatatype));
+	}
+
+}


@@ -25,14 +25,15 @@ import ca.uhn.fhir.rest.api.Constants;
 import jakarta.persistence.*;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
-import org.hibernate.annotations.JdbcTypeCode;
+import org.hibernate.Length;
 import org.hibernate.annotations.OptimisticLock;
-import org.hibernate.type.SqlTypes;

 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Collection;

+import static org.apache.commons.lang3.StringUtils.defaultString;
+
 @Entity
 @Table(
		name = ResourceHistoryTable.HFJ_RES_VER,
@@ -57,7 +58,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
	public static final int ENCODING_COL_LENGTH = 5;

	public static final String HFJ_RES_VER = "HFJ_RES_VER";
-	public static final int RES_TEXT_VC_MAX_LENGTH = 4000;
	private static final long serialVersionUID = 1L;

	@Id
@@ -86,13 +86,15 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
	@OneToMany(mappedBy = "myResourceHistory", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
	private Collection<ResourceHistoryTag> myTags;

+	/**
+	 * Note: No setter for this field because it's only a legacy way of storing data now.
+	 */
	@Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = true)
	@Lob()
	@OptimisticLock(excluded = true)
	private byte[] myResource;

-	@Column(name = "RES_TEXT_VC", length = RES_TEXT_VC_MAX_LENGTH, nullable = true)
-	@JdbcTypeCode(SqlTypes.LONG32VARCHAR)
+	@Column(name = "RES_TEXT_VC", nullable = true, length = Length.LONG32)
	@OptimisticLock(excluded = true)
	private String myResourceTextVc;
@@ -153,7 +155,8 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
	}

	public void setResourceTextVc(String theResourceTextVc) {
-		myResourceTextVc = theResourceTextVc;
+		myResource = null;
+		myResourceTextVc = defaultString(theResourceTextVc);
	}

	public ResourceHistoryProvenanceEntity getProvenance() {
@@ -209,10 +212,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
		return myResource;
	}

-	public void setResource(byte[] theResource) {
-		myResource = theResource;
-	}
-
	@Override
	public Long getResourceId() {
		return myResourceId;
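A brief, hypothetical illustration of the new setter semantics (not part of the commit): setResourceTextVc() now clears the legacy field, so a row never carries both representations at once, and with setResource() removed the LOB column can no longer be written through the entity:

    ResourceHistoryTable entity = new ResourceHistoryTable();
    entity.setResourceTextVc("{\"resourceType\":\"Patient\"}");
    // The legacy RES_TEXT value is nulled out by the setter above
    assert entity.getResource() == null;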


@@ -41,14 +41,12 @@ public class ConsumeFilesStepR4Test extends BasePartitioningR4Test {
	public void before() throws Exception {
		super.before();
		myPartitionSettings.setPartitioningEnabled(false);
-		myStorageSettings.setInlineResourceTextBelowSize(10000);
	}

	@AfterEach
	@Override
	public void after() {
		super.after();
-		myStorageSettings.setInlineResourceTextBelowSize(new JpaStorageSettings().getInlineResourceTextBelowSize());
	}

	@Test


@@ -91,7 +91,6 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
		myStorageSettings.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED);
		myStorageSettings.setIndexOnContainedResources(new JpaStorageSettings().isIndexOnContainedResources());
		myStorageSettings.setIndexOnContainedResourcesRecursively(new JpaStorageSettings().isIndexOnContainedResourcesRecursively());
-		myStorageSettings.setInlineResourceTextBelowSize(new JpaStorageSettings().getInlineResourceTextBelowSize());
	}

	@Test


@@ -1,102 +1,61 @@
 package ca.uhn.fhir.jpa.dao.r4;

-import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
-import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
+import ca.uhn.fhir.jpa.dao.GZipUtil;
+import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
+import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
 import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
-import org.apache.commons.lang3.StringUtils;
-import org.hl7.fhir.r4.model.IdType;
+import ca.uhn.fhir.rest.param.DateRangeParam;
+import ca.uhn.fhir.rest.param.HistorySearchDateRangeParam;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.r4.model.Patient;
-import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
-import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.HashMap;

-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.containsString;
 import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
 import static org.junit.jupiter.api.Assertions.assertNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;

 public class FhirResourceDaoR4InlineResourceModeTest extends BaseJpaR4Test {

-	@BeforeEach
-	public void beforeSetDao() {
-		myStorageSettings.setInlineResourceTextBelowSize(5000);
-	}
-
-	@AfterEach
-	public void afterResetDao() {
-		myStorageSettings.setInlineResourceTextBelowSize(new JpaStorageSettings().getInlineResourceTextBelowSize());
-	}
-
	@Test
-	public void testCreateWithInlineResourceTextStorage() {
-		Patient patient = new Patient();
-		patient.setActive(true);
-		Long resourceId = myPatientDao.create(patient).getId().getIdPartAsLong();
-
-		patient = new Patient();
-		patient.setId("Patient/" + resourceId);
-		patient.setActive(false);
-		myPatientDao.update(patient);
-
-		runInTransaction(() -> {
-			// Version 1
-			ResourceHistoryTable entity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(resourceId, 1);
-			assertNull(entity.getResource());
-			assertThat(entity.getResourceTextVc(), containsString("\"active\":true"));
-			// Version 2
-			entity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(resourceId, 2);
-			assertNull(entity.getResource());
-			assertThat(entity.getResourceTextVc(), containsString("\"active\":false"));
-		});
-
-		patient = myPatientDao.read(new IdType("Patient/" + resourceId));
-		assertFalse(patient.getActive());
-
-		patient = (Patient) myPatientDao.search(SearchParameterMap.newSynchronous()).getAllResources().get(0);
-		assertFalse(patient.getActive());
-	}
-
-	@Test
-	public void testDontUseInlineAboveThreshold() {
-		String veryLongFamilyName = StringUtils.leftPad("", 6000, 'a');
-
-		Patient patient = new Patient();
-		patient.setActive(true);
-		patient.addName().setFamily(veryLongFamilyName);
-		Long resourceId = myPatientDao.create(patient).getId().getIdPartAsLong();
-
-		runInTransaction(() -> {
-			// Version 1
-			ResourceHistoryTable entity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(resourceId, 1);
-			assertNotNull(entity.getResource());
-			assertNull(entity.getResourceTextVc());
-		});
-
-		patient = myPatientDao.read(new IdType("Patient/" + resourceId));
-		assertEquals(veryLongFamilyName, patient.getNameFirstRep().getFamily());
-	}
-
-	@Test
-	public void testNopOnUnchangedUpdate() {
-		Patient patient = new Patient();
-		patient.setActive(true);
-		Long resourceId = myPatientDao.create(patient).getId().getIdPartAsLong();
-
-		patient = new Patient();
-		patient.setId("Patient/" + resourceId);
-		patient.setActive(true);
-		DaoMethodOutcome updateOutcome = myPatientDao.update(patient);
-		assertEquals("1", updateOutcome.getId().getVersionIdPart());
-		assertTrue(updateOutcome.isNop());
+	public void testRetrieveNonInlinedResource() {
+		IIdType id = createPatient(withActiveTrue());
+		Long pid = id.getIdPartAsLong();
+
+		relocateResourceTextToCompressedColumn(pid, 1L);
+
+		runInTransaction(()->{
+			ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(pid, 1);
+			assertNotNull(historyEntity.getResource());
+			assertNull(historyEntity.getResourceTextVc());
+			assertEquals(ResourceEncodingEnum.JSONC, historyEntity.getEncoding());
+		});
+
+		// Read
+		validatePatient(myPatientDao.read(id.withVersion(null), mySrd));
+
+		// VRead
+		validatePatient(myPatientDao.read(id.withVersion("1"), mySrd));
+
+		// Search (Sync)
+		validatePatient(myPatientDao.search(SearchParameterMap.newSynchronous(), mySrd).getResources(0, 1).get(0));
+
+		// Search (Async)
+		validatePatient(myPatientDao.search(new SearchParameterMap(), mySrd).getResources(0, 1).get(0));
+
+		// History
+		validatePatient(myPatientDao.history(id, new HistorySearchDateRangeParam(new HashMap<>(), new DateRangeParam(), 0), mySrd).getResources(0, 1).get(0));
+	}
+
+	private void validatePatient(IBaseResource theRead) {
+		assertTrue(((Patient)theRead).getActive());
	}

 }


@@ -1031,26 +1031,15 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
	@ParameterizedTest
	@CsvSource({
-		// NoOp   OptimisticLock  OptimizeMode      ExpectedSelect  ExpectedUpdate
-		" false,  false,          CURRENT_VERSION,   2,  1",
-		" true,   false,          CURRENT_VERSION,   2,  0",
-		" false,  true,           CURRENT_VERSION,  12,  1",
-		" true,   true,           CURRENT_VERSION,  12,  0",
-		" false,  false,          ALL_VERSIONS,     12, 10",
-		" true,   false,          ALL_VERSIONS,     12,  0",
-		" false,  true,           ALL_VERSIONS,     22, 10",
-		" true,   true,           ALL_VERSIONS,     22,  0",
+		// OptimisticLock  OptimizeMode      ExpectedSelect  ExpectedUpdate
+		" false,           CURRENT_VERSION,   2,  0",
+		" true,            CURRENT_VERSION,  12,  0",
+		" false,           ALL_VERSIONS,     12,  0",
+		" true,            ALL_VERSIONS,     22,  0",
	})
-	public void testReindexJob_OptimizeStorage(boolean theNoOp, boolean theOptimisticLock, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageModeEnum, int theExpectedSelectCount, int theExpectedUpdateCount) {
+	public void testReindexJob_OptimizeStorage(boolean theOptimisticLock, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageModeEnum, int theExpectedSelectCount, int theExpectedUpdateCount) {
		// Setup
-
-		// In no-op mode, the inlining is already in the state it needs to be in
-		if (theNoOp) {
-			myStorageSettings.setInlineResourceTextBelowSize(10000);
-		} else {
-			myStorageSettings.setInlineResourceTextBelowSize(0);
-		}
-
		ResourceIdListWorkChunkJson data = new ResourceIdListWorkChunkJson();
		IIdType patientId = createPatient(withActiveTrue());
		for (int i = 0; i < 10; i++) {


@@ -274,7 +274,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
			ResourceHistoryTable newHistory = table.toHistory(true);
			ResourceHistoryTable currentHistory = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(table.getId(), 1L);
			newHistory.setEncoding(currentHistory.getEncoding());
-			newHistory.setResource(currentHistory.getResource());
+			newHistory.setResourceTextVc(currentHistory.getResourceTextVc());
			myResourceHistoryTableDao.save(newHistory);
		});
@@ -2928,7 +2928,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
			ResourceHistoryTable table = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 1L);
			String newContent = myFhirContext.newJsonParser().encodeResourceToString(p);
			newContent = newContent.replace("male", "foo");
-			table.setResource(newContent.getBytes(Charsets.UTF_8));
+			table.setResourceTextVc(newContent);
			table.setEncoding(ResourceEncodingEnum.JSON);
			myResourceHistoryTableDao.save(table);
		}


@@ -620,11 +620,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
		template.execute((TransactionCallback<ResourceTable>) t -> {
			ResourceHistoryTable resourceHistoryTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), id.getVersionIdPartAsLong());
			resourceHistoryTable.setEncoding(ResourceEncodingEnum.JSON);
-			try {
-				resourceHistoryTable.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8"));
-			} catch (UnsupportedEncodingException e) {
-				throw new Error(e);
-			}
+			resourceHistoryTable.setResourceTextVc("{\"resourceType\":\"FOO\"}");
			myResourceHistoryTableDao.save(resourceHistoryTable);

			ResourceTable table = myResourceTableDao.findById(id.getIdPartAsLong()).orElseThrow(IllegalStateException::new);
@@ -1917,11 +1913,11 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
		Patient p = new Patient();
		p.addIdentifier().setSystem("urn:system").setValue(methodName);
-		myPatientDao.create(p, mySrd).getId();
+		myPatientDao.create(p, mySrd);

		p = new Patient();
		p.addIdentifier().setSystem("urn:system").setValue(methodName);
-		myPatientDao.create(p, mySrd).getId();
+		myPatientDao.create(p, mySrd);

		Observation o = new Observation();
		o.getCode().setText("Some Observation");


@@ -87,6 +87,11 @@ public class ReindexJobTest extends BaseJpaR4Test {
			createPatient(withActiveTrue());
		}

+		// Move resource text to compressed storage, which we don't write to anymore but legacy
+		// data may exist that was previously stored there, so we're simulating that.
+		List<ResourceHistoryTable> allHistoryEntities = runInTransaction(() -> myResourceHistoryTableDao.findAll());
+		allHistoryEntities.forEach(t->relocateResourceTextToCompressedColumn(t.getResourceId(), t.getVersion()));
+
		runInTransaction(()->{
			assertEquals(20, myResourceHistoryTableDao.count());
			for (ResourceHistoryTable history : myResourceHistoryTableDao.findAll()) {
@@ -141,6 +146,11 @@ public class ReindexJobTest extends BaseJpaR4Test {
			createPatient(withActiveTrue());
		}

+		// Move resource text to compressed storage, which we don't write to anymore but legacy
+		// data may exist that was previously stored there, so we're simulating that.
+		List<ResourceHistoryTable> allHistoryEntities = runInTransaction(() -> myResourceHistoryTableDao.findAll());
+		allHistoryEntities.forEach(t->relocateResourceTextToCompressedColumn(t.getResourceId(), t.getVersion()));
+
		runInTransaction(()->{
			assertEquals(20, myResourceHistoryTableDao.count());
			for (ResourceHistoryTable history : myResourceHistoryTableDao.findAll()) {
@@ -149,8 +159,6 @@ public class ReindexJobTest extends BaseJpaR4Test {
			}
		});

-		myStorageSettings.setInlineResourceTextBelowSize(10000);
-
		// execute
		JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
		startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX);


@@ -38,11 +38,9 @@ public class ResourceProviderInvalidDataR4Test extends BaseResourceProviderR4Tes
		// Manually set the value to be an invalid decimal number
		runInTransaction(() -> {
			ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id, 1);
-			byte[] bytesCompressed = resVer.getResource();
-			String resourceText = GZipUtil.decompress(bytesCompressed);
+			String resourceText = resVer.getResourceTextVc();
			resourceText = resourceText.replace("100", "-.100");
-			bytesCompressed = GZipUtil.compress(resourceText);
-			resVer.setResource(bytesCompressed);
+			resVer.setResourceTextVc(resourceText);
			myResourceHistoryTableDao.save(resVer);
		});


@@ -54,6 +54,7 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
 import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
 import ca.uhn.fhir.util.ClasspathUtil;
 import ca.uhn.fhir.util.StopWatch;
+import ca.uhn.fhir.util.TestUtil;
 import ca.uhn.fhir.util.UrlUtil;
 import com.google.common.base.Charsets;
 import com.google.common.collect.Lists;
@@ -6723,7 +6724,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
		// Update Patient after delay
		int delayInMs = 1000;
-		TimeUnit.MILLISECONDS.sleep(delayInMs);
+		TestUtil.sleepAtLeast(delayInMs + 100);

		patient.getNameFirstRep().addGiven("Bob");
		myClient.update().resource(patient).execute();


@@ -397,6 +397,11 @@ public class GiantTransactionPerfTest {
			throw new UnsupportedOperationException();
		}

+		@Override
+		public void updateNonInlinedContents(byte[] theText, long thePid) {
+			throw new UnsupportedOperationException();
+		}
+
		@Nonnull
		@Override
		public List<ResourceHistoryTable> findAll() {


@@ -34,6 +34,20 @@
			<scope>test</scope>
		</dependency>

+		<!-- Needed for Testcontainers -->
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.13.2</version>
+			<scope>provided</scope>
+			<exclusions>
+				<exclusion>
+					<groupId>org.hamcrest</groupId>
+					<artifactId>hamcrest-core</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
	</dependencies>

	<build>


@@ -0,0 +1,148 @@
+package ca.uhn.fhir.jpa.dao.r5.database;
+
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.embedded.JpaEmbeddedDatabase;
+import ca.uhn.fhir.jpa.migrate.HapiMigrationStorageSvc;
+import ca.uhn.fhir.jpa.migrate.MigrationTaskList;
+import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
+import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
+import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
+import ca.uhn.fhir.jpa.test.config.TestR5Config;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
+import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
+import ca.uhn.fhir.util.VersionEnum;
+import jakarta.persistence.EntityManagerFactory;
+import org.apache.commons.lang3.StringUtils;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r5.model.Patient;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.data.envers.repository.support.EnversRevisionRepositoryFactoryBean;
+import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.junit.jupiter.SpringExtension;
+
+import javax.sql.DataSource;
+import java.util.Properties;
+import java.util.Set;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
+@ExtendWith(SpringExtension.class)
+@EnableJpaRepositories(repositoryFactoryBeanClass = EnversRevisionRepositoryFactoryBean.class)
+@ContextConfiguration(classes = {BaseDatabaseVerificationIT.TestConfig.class})
+public abstract class BaseDatabaseVerificationIT {
+
+	private static final Logger ourLog = LoggerFactory.getLogger(BaseDatabaseVerificationIT.class);
+	private static final String MIGRATION_TABLENAME = "MIGRATIONS";
+
+	@Autowired
+	EntityManagerFactory myEntityManagerFactory;
+
+	@Autowired
+	JpaEmbeddedDatabase myJpaEmbeddedDatabase;
+
+	@Autowired
+	IFhirResourceDao<Patient> myPatientDao;
+
+	@ParameterizedTest
+	@ValueSource(ints = {10, 100000})
+	public void testCreateRead(int theSize) {
+		String name = StringUtils.leftPad("", theSize, "a");
+
+		Patient patient = new Patient();
+		patient.setActive(true);
+		patient.addName().setFamily(name);
+		IIdType id = myPatientDao.create(patient, new SystemRequestDetails()).getId();
+
+		Patient actual = myPatientDao.read(id, new SystemRequestDetails());
+		assertEquals(name, actual.getName().get(0).getFamily());
+	}
+
+	@Test
+	public void testDelete() {
+		Patient patient = new Patient();
+		patient.setActive(true);
+		IIdType id = myPatientDao.create(patient, new SystemRequestDetails()).getId().toUnqualifiedVersionless();
+
+		myPatientDao.delete(id, new SystemRequestDetails());
+
+		assertThrows(ResourceGoneException.class, () -> myPatientDao.read(id, new SystemRequestDetails()));
+	}
+
+	@Configuration
+	public static class TestConfig extends TestR5Config {
+
+		@Autowired
+		private JpaDatabaseContextConfigParamObject myJpaDatabaseContextConfigParamObject;
+
+		@Override
+		@Bean
+		public DataSource dataSource() {
+			DataSource dataSource = myJpaDatabaseContextConfigParamObject.getJpaEmbeddedDatabase().getDataSource();
+
+			HapiMigrationDao hapiMigrationDao = new HapiMigrationDao(dataSource, myJpaDatabaseContextConfigParamObject.getJpaEmbeddedDatabase().getDriverType(), MIGRATION_TABLENAME);
+			HapiMigrationStorageSvc hapiMigrationStorageSvc = new HapiMigrationStorageSvc(hapiMigrationDao);
+
+			MigrationTaskList tasks = new HapiFhirJpaMigrationTasks(Set.of()).getAllTasks(VersionEnum.values());
+
+			SchemaMigrator schemaMigrator = new SchemaMigrator(
+					"HAPI FHIR", MIGRATION_TABLENAME, dataSource, new Properties(), tasks, hapiMigrationStorageSvc);
+			schemaMigrator.setDriverType(myJpaDatabaseContextConfigParamObject.getJpaEmbeddedDatabase().getDriverType());
+
+			ourLog.info("About to run migration...");
+			schemaMigrator.createMigrationTableIfRequired();
+			schemaMigrator.migrate();
+			ourLog.info("Migration complete");
+
+			return dataSource;
+		}
+
+		@Bean
+		public JpaEmbeddedDatabase jpaEmbeddedDatabase(JpaDatabaseContextConfigParamObject theJpaDatabaseContextConfigParamObject) {
+			return theJpaDatabaseContextConfigParamObject.getJpaEmbeddedDatabase();
+		}
+
+		@Override
+		protected Properties jpaProperties() {
+			Properties retVal = super.jpaProperties();
+			retVal.put("hibernate.hbm2ddl.auto", "none");
+			retVal.put("hibernate.dialect", myJpaDatabaseContextConfigParamObject.getDialect());
+			return retVal;
+		}
+	}
+
+	public static class JpaDatabaseContextConfigParamObject {
+		private JpaEmbeddedDatabase myJpaEmbeddedDatabase;
+		private String myDialect;
+
+		public JpaDatabaseContextConfigParamObject(JpaEmbeddedDatabase theJpaEmbeddedDatabase, String theDialect) {
+			myJpaEmbeddedDatabase = theJpaEmbeddedDatabase;
+			myDialect = theDialect;
+		}
+
+		public JpaEmbeddedDatabase getJpaEmbeddedDatabase() {
+			return myJpaEmbeddedDatabase;
+		}
+
+		public String getDialect() {
+			return myDialect;
+		}
+	}
+}


@@ -0,0 +1,27 @@
+package ca.uhn.fhir.jpa.dao.r5.database;
+
+import ca.uhn.fhir.jpa.embedded.MsSqlEmbeddedDatabase;
+import ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect;
+import ca.uhn.fhir.jpa.model.dialect.HapiFhirSQLServerDialect;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.test.context.ContextConfiguration;
+
+@ContextConfiguration(classes = {
+	DatabaseVerificationWithMsSqlIT.TestConfig.class
+})
+public class DatabaseVerificationWithMsSqlIT extends BaseDatabaseVerificationIT {
+
+	@Configuration
+	public static class TestConfig {
+
+		@Bean
+		public JpaDatabaseContextConfigParamObject jpaDatabaseParamObject() {
+			return new JpaDatabaseContextConfigParamObject(
+				new MsSqlEmbeddedDatabase(),
+				HapiFhirSQLServerDialect.class.getName()
+			);
+		}
+	}
+}


@@ -0,0 +1,26 @@
+package ca.uhn.fhir.jpa.dao.r5.database;
+
+import ca.uhn.fhir.jpa.embedded.OracleEmbeddedDatabase;
+import ca.uhn.fhir.jpa.model.dialect.HapiFhirOracleDialect;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.test.context.ContextConfiguration;
+
+@ContextConfiguration(classes = {
+	DatabaseVerificationWithOracleIT.TestConfig.class
+})
+public class DatabaseVerificationWithOracleIT extends BaseDatabaseVerificationIT {
+
+	@Configuration
+	public static class TestConfig {
+
+		@Bean
+		public JpaDatabaseContextConfigParamObject jpaDatabaseParamObject(){
+			return new JpaDatabaseContextConfigParamObject(
+				new OracleEmbeddedDatabase(),
+				HapiFhirOracleDialect.class.getName()
+			);
+		}
+	}
+}


@@ -0,0 +1,26 @@
+package ca.uhn.fhir.jpa.dao.r5.database;
+
+import ca.uhn.fhir.jpa.embedded.PostgresEmbeddedDatabase;
+import ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.test.context.ContextConfiguration;
+
+@ContextConfiguration(classes = {
+	DatabaseVerificationWithPostgresIT.TestConfig.class
+})
+public class DatabaseVerificationWithPostgresIT extends BaseDatabaseVerificationIT {
+
+	@Configuration
+	public static class TestConfig {
+
+		@Bean
+		public JpaDatabaseContextConfigParamObject jpaDatabaseParamObject() {
+			return new JpaDatabaseContextConfigParamObject(
+				new PostgresEmbeddedDatabase(),
+				HapiFhirPostgresDialect.class.getName()
+			);
+		}
+	}
+}


@@ -131,20 +131,14 @@
		<dependency>
			<groupId>org.testcontainers</groupId>
			<artifactId>postgresql</artifactId>
-			<version>1.17.6</version>
-			<scope>compile</scope>
		</dependency>
		<dependency>
			<groupId>org.testcontainers</groupId>
			<artifactId>mssqlserver</artifactId>
-			<version>1.17.6</version>
-			<scope>compile</scope>
		</dependency>
		<dependency>
			<groupId>org.testcontainers</groupId>
			<artifactId>oracle-xe</artifactId>
-			<version>1.17.6</version>
-			<scope>compile</scope>
		</dependency>
		<dependency>
			<groupId>org.postgresql</groupId>


@@ -20,6 +20,7 @@
 package ca.uhn.fhir.jpa.embedded;

 import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
+import jakarta.annotation.PreDestroy;
 import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -53,6 +54,7 @@ public abstract class JpaEmbeddedDatabase {
	private JdbcTemplate myJdbcTemplate;
	private Connection myConnection;

+	@PreDestroy
	public abstract void stop();

	public abstract void disableConstraints();
@@ -116,7 +118,7 @@
		for (String sql : theStatements) {
			if (!StringUtils.isBlank(sql)) {
				statement.addBatch(sql);
-				ourLog.info("Added to batch: {}", sql);
+				ourLog.debug("Added to batch: {}", sql);
			}
		}
		statement.executeBatch();


@@ -40,6 +40,7 @@ import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
 import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor;
 import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
 import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
+import ca.uhn.fhir.jpa.dao.GZipUtil;
 import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
 import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
 import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
@@ -82,6 +83,7 @@ import ca.uhn.fhir.jpa.entity.TermValueSet;
 import ca.uhn.fhir.jpa.entity.TermValueSetConcept;
 import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
 import ca.uhn.fhir.jpa.packages.IPackageInstallerSvc;
 import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
 import ca.uhn.fhir.jpa.provider.JpaSystemProvider;
@@ -663,6 +665,14 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
		return myTxManager;
	}

+	protected void relocateResourceTextToCompressedColumn(Long theResourcePid, Long theVersion) {
+		runInTransaction(()->{
+			ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theResourcePid, theVersion);
+			byte[] contents = GZipUtil.compress(historyEntity.getResourceTextVc());
+			myResourceHistoryTableDao.updateNonInlinedContents(contents, historyEntity.getId());
+		});
+	}
+
	protected ValidationResult validateWithResult(IBaseResource theResource) {
		FhirValidator validatorModule = myFhirContext.newValidator();
		FhirInstanceValidator instanceValidator = new FhirInstanceValidator(myValidationSupport);


@@ -186,7 +186,7 @@ public class TestR5Config {
		return retVal;
	}

-	private Properties jpaProperties() {
+	protected Properties jpaProperties() {
		Properties extraProperties = new Properties();
		extraProperties.put("hibernate.format_sql", "false");
		extraProperties.put("hibernate.show_sql", "false");


@@ -34,7 +34,7 @@ public enum ColumnTypeEnum {
	/**
	 * Unlimited length text, with a column definition containing the annotation:
-	 * <code>@JdbcTypeCode(SqlTypes.LONG32VARCHAR)</code>
+	 * <code>@Column(length=Integer.MAX_VALUE)</code>
	 */
	TEXT,

	BIG_DECIMAL;
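For reference, columns of this type are declared through the migration builder DSL shown earlier in HapiFhirJpaMigrationTasks; a minimal sketch (the table name, version ID, and column name here are made up for illustration):

    // Hypothetical migration task adding an unlimited-length text column
    Builder.BuilderWithTableName table = version.onTable("HFJ_EXAMPLE");
    table.addColumn("20231215.1", "EXAMPLE_TEXT_COL").nullable().type(ColumnTypeEnum.TEXT);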


@@ -62,7 +62,7 @@ public final class ColumnTypeToDriverTypeToSqlType {
		setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.MYSQL_5_7, "double precision");
		setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.MSSQL_2012, "double precision");
		setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.ORACLE_12C, "double precision");
-		setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.POSTGRES_9_4, "float8");
+		setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.POSTGRES_9_4, "double precision");

		setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.H2_EMBEDDED, "bigint");
		setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.DERBY_EMBEDDED, "bigint");
@@ -123,7 +123,7 @@
				"oid"); // the PG driver will write oid into a `text` column
		setColumnType(ColumnTypeEnum.CLOB, DriverTypeEnum.MSSQL_2012, "varchar(MAX)");

-		setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.H2_EMBEDDED, "character large object");
+		setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.H2_EMBEDDED, "clob");
		setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.DERBY_EMBEDDED, "clob");
		setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.MARIADB_10_1, "longtext");
		setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.MYSQL_5_7, "longtext");


@@ -267,14 +267,6 @@ public class JpaStorageSettings extends StorageSettings {
	 * @since 5.6.0
	 */
	private boolean myAdvancedHSearchIndexing = false;
-
-	/**
-	 * If set to a positive number, any resources with a character length at or below the given number
-	 * of characters will be stored inline in the <code>HFJ_RES_VER</code> table instead of using a
-	 * separate LOB column.
-	 *
-	 * @since 5.7.0
-	 */
-	private int myInlineResourceTextBelowSize = 0;

	/**
	 * @since 5.7.0
@@ -381,25 +373,21 @@
	}

	/**
-	 * If set to a positive number, any resources with a character length at or below the given number
-	 * of characters will be stored inline in the <code>HFJ_RES_VER</code> table instead of using a
-	 * separate LOB column.
-	 *
	 * @since 5.7.0
+	 * @deprecated This setting no longer does anything as of HAPI FHIR 7.0.0
	 */
+	@Deprecated
	public int getInlineResourceTextBelowSize() {
-		return myInlineResourceTextBelowSize;
+		return 0;
	}

	/**
-	 * If set to a positive number, any resources with a character length at or below the given number
-	 * of characters will be stored inline in the <code>HFJ_RES_VER</code> table instead of using a
-	 * separate LOB column.
-	 *
	 * @since 5.7.0
+	 * @deprecated This setting no longer does anything as of HAPI FHIR 7.0.0
	 */
+	@Deprecated
	public void setInlineResourceTextBelowSize(int theInlineResourceTextBelowSize) {
-		myInlineResourceTextBelowSize = theInlineResourceTextBelowSize;
+		// ignored
	}

	/**
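The deprecated accessors are kept so that callers compiled against older versions still link, but the setting is now inert; a hypothetical illustration:

    JpaStorageSettings settings = new JpaStorageSettings();
    settings.setInlineResourceTextBelowSize(5000); // silently ignored as of 7.0.0
    int threshold = settings.getInlineResourceTextBelowSize(); // always returns 0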


@@ -361,9 +361,6 @@ public class JpaModelScannerAndVerifier {
			if (!theIsView && column.length() == 255) {
				throw new IllegalStateException(Msg.code(1626) + "Field does not have an explicit maximum length specified: " + field);
			}
-			if (column.length() > MAX_COL_LENGTH) {
-				throw new IllegalStateException(Msg.code(1627) + "Field is too long: " + field);
-			}
		}

		Size size = theAnnotatedElement.getAnnotation(Size.class);


@@ -184,14 +184,6 @@
		<dependency>
			<groupId>org.apache.maven</groupId>
			<artifactId>maven-plugin-api</artifactId>
-			<!--
-			<exclusions>
-				<exclusion>
-					<groupId>org.eclipse.sisu</groupId>
-					<artifactId>org.eclipse.sisu.plexus</artifactId>
-				</exclusion>
-			</exclusions>
-			-->
		</dependency>
		<dependency>
			<groupId>org.apache.maven.plugin-tools</groupId>


@ -1,6 +1,7 @@
package ca.uhn.fhir.tinder.ddl;
import ca.uhn.fhir.jpa.util.ISequenceValueMassager;
+ import ca.uhn.fhir.util.IoUtil;
import jakarta.annotation.Nonnull;
import jakarta.persistence.Entity;
import jakarta.persistence.MappedSuperclass;
@ -29,6 +30,7 @@ import org.springframework.core.io.ResourceLoader;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.type.filter.AnnotationTypeFilter;
+ import java.io.Closeable;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
@ -125,19 +127,8 @@ public class DdlGeneratorHibernate61 {
writeContentsToFile(nextDialect.getAppendFile(), classLoader, outputFile);
	}
- }
-
- private static void writeContentsToFile(String prependFile, ClassLoader classLoader, File outputFile)
- 		throws MojoFailureException {
- 	if (isNotBlank(prependFile)) {
- 		ResourceLoader loader = new DefaultResourceLoader(classLoader);
- 		Resource resource = loader.getResource(prependFile);
- 		try (Writer w = new FileWriter(outputFile, true)) {
- 			w.append(resource.getContentAsString(StandardCharsets.UTF_8));
- 		} catch (IOException e) {
- 			throw new MojoFailureException("Failed to write to file " + outputFile + ": " + e.getMessage(), e);
- 		}
- 	}
+
+ 	IoUtil.closeQuietly(connectionProvider);
}

public void setProject(MavenProject theProject) {
@ -204,18 +195,64 @@ public class DdlGeneratorHibernate61 {
 * here. The schema export doesn't actually touch this DB, so it doesn't
 * matter that it doesn't correlate to the specified dialect.
 */
- private static class FakeConnectionConnectionProvider extends UserSuppliedConnectionProviderImpl {
+ private static class FakeConnectionConnectionProvider extends UserSuppliedConnectionProviderImpl
+ 		implements Closeable {
	private static final long serialVersionUID = 4147495169899817244L;
+	private Connection connection;
-
-	@Override
-	public Connection getConnection() throws SQLException {
-		ourLog.trace("Using internal driver: {}", org.h2.Driver.class);
-		return DriverManager.getConnection("jdbc:h2:mem:tmp", "sa", "sa");
-	}
-
-	@Override
-	public void closeConnection(Connection conn) throws SQLException {
-		conn.close();
-	}
+
+	public FakeConnectionConnectionProvider() {
+		try {
+			connection = DriverManager.getConnection("jdbc:h2:mem:tmp", "sa", "sa");
+		} catch (SQLException e) {
+			connection = null;
+			return;
+		}
+
+		/*
+		 * The Oracle Dialect tries to query for any existing sequences, so we need to supply
+		 * a fake empty table to answer that query.
+		 */
+		try {
+			connection.setAutoCommit(true);
+			connection
+					.prepareStatement("create table all_sequences (PID bigint not null, primary key (PID))")
+					.execute();
+		} catch (SQLException e) {
+			ourLog.error("Failed to create sequences table", e);
+		}
+	}
+
+	@Override
+	public Connection getConnection() {
+		ourLog.trace("Using internal driver: {}", org.h2.Driver.class);
+		return connection;
+	}
+
+	@Override
+	public void closeConnection(Connection conn) {
+		// ignore
+	}
+
+	@Override
+	public void close() throws IOException {
+		try {
+			connection.close();
+		} catch (SQLException e) {
+			throw new IOException(e);
+		}
+	}
}

+ private static void writeContentsToFile(String prependFile, ClassLoader classLoader, File outputFile)
+ 		throws MojoFailureException {
+ 	if (isNotBlank(prependFile)) {
+ 		ResourceLoader loader = new DefaultResourceLoader(classLoader);
+ 		Resource resource = loader.getResource(prependFile);
+ 		try (Writer w = new FileWriter(outputFile, true)) {
+ 			w.append(resource.getContentAsString(StandardCharsets.UTF_8));
+ 		} catch (IOException e) {
+ 			throw new MojoFailureException("Failed to write to file " + outputFile + ": " + e.getMessage(), e);
+ 		}
+ 	}
+ }
}
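The provider now owns a single long-lived in-memory H2 connection, created once in the constructor so the fake all_sequences table stays visible to every getConnection() call, and the new Closeable contract lets the generator release it once all dialect exports are done. A minimal lifecycle sketch, assuming the calling code shown earlier in this file:

	// Hedged sketch: one shared connection for the whole export run
	FakeConnectionConnectionProvider connectionProvider = new FakeConnectionConnectionProvider();
	try {
		Connection c = connectionProvider.getConnection(); // same instance on every call
		// ... Hibernate schema export runs for each configured dialect ...
	} finally {
		IoUtil.closeQuietly(connectionProvider); // closes the one real H2 connection
	}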

View File

@ -28,13 +28,13 @@ public class GenerateDdlMojo extends AbstractMojo {
private static final Logger ourLog = LoggerFactory.getLogger(GenerateDdlMojo.class);
@Parameter
- private List<String> packageNames;
+ List<String> packageNames;
@Parameter
- private List<Dialect> dialects;
+ List<Dialect> dialects;
@Parameter
- private String outputDirectory;
+ String outputDirectory;
@Parameter(defaultValue = "${project}", readonly = true)
private transient MavenProject project;
@ -70,18 +70,20 @@ public class GenerateDdlMojo extends AbstractMojo {
public static void main(String[] args) throws MojoExecutionException, MojoFailureException {
	/*
-	 * Note, to execute this, add the following snippet to this module's POM. The whole project won't work with
+	 * Note, to execute this for real entities, add the following snippet to this module's POM. The whole project won't work with
	 * that added, but you can add it temporarily in order to debug this in IJ:
	 * <dependency>
	 *    <groupId>ca.uhn.hapi.fhir</groupId>
	 *    <artifactId>hapi-fhir-jpaserver-model</artifactId>
	 *    <version>${project.version}</version>
	 * </dependency>
+	 *
+	 * Alternately, there is a unit test with fake entities that also runs this class.
	 */
	GenerateDdlMojo m = new GenerateDdlMojo();
	m.packageNames = List.of("ca.uhn.fhir.jpa.model.entity");
	m.outputDirectory = "hapi-tinder-plugin/target";
-	m.dialects = List.of(new Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect", "h2.sql"));
+	m.dialects = List.of(new Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect", "postgres.sql"));
	m.execute();
}

View File

@ -0,0 +1,46 @@
package ca.uhn.fhir.tinder.ddl;
import org.apache.commons.io.FileUtils;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.junit.jupiter.api.Test;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Locale;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
class GenerateDdlMojoTest {
@Test
public void testGenerateSequences() throws MojoExecutionException, MojoFailureException, IOException {
GenerateDdlMojo m = new GenerateDdlMojo();
m.packageNames = List.of("ca.uhn.fhir.tinder.ddl.test");
m.outputDirectory = "target/generate-ddl-plugin-test/";
m.dialects = List.of(
new GenerateDdlMojo.Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect", "h2.sql"),
new GenerateDdlMojo.Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect", "postgres.sql"),
new GenerateDdlMojo.Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirOracleDialect", "oracle.sql"),
new GenerateDdlMojo.Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirSQLServerDialect", "sqlserver.sql")
);
m.execute();
verifySequence("sqlserver.sql");
verifySequence("oracle.sql");
verifySequence("postgres.sql");
verifySequence("h2.sql");
}
private static void verifySequence(String fileName) throws IOException {
String contents = FileUtils.readFileToString(new File("target/generate-ddl-plugin-test/" + fileName), StandardCharsets.UTF_8).toUpperCase(Locale.ROOT);
assertThat(fileName, contents, containsString("CREATE SEQUENCE"));
}
}

View File

@ -0,0 +1,25 @@
package ca.uhn.fhir.tinder.ddl.test;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
@Table()
@Entity()
public class ExampleEntity {
@Id
@SequenceGenerator(name = "SEQ_RESOURCE_HISTORY_ID", sequenceName = "SEQ_RESOURCE_HISTORY_ID")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_RESOURCE_HISTORY_ID")
@Column(name = "PID")
private Long myId;
@Column(name = "RES_ID", nullable = false, updatable = false, insertable = false)
private Long myResourceId;
}
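A hedged aside on the entity design: the named @SequenceGenerator paired with GenerationType.AUTO is what makes every tested dialect emit a CREATE SEQUENCE statement, which is exactly what verifySequence() asserts above. A hypothetical identity-based variant (not part of this change) would presumably break that assertion on dialects with native identity columns:

	@Id
	@GeneratedValue(strategy = GenerationType.IDENTITY)
	@Column(name = "PID")
	private Long myId;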

pom.xml
View File

@ -2188,7 +2188,21 @@
<groupId>org.testcontainers</groupId>
<artifactId>testcontainers</artifactId>
<version>${testcontainers_version}</version>
- <scope>test</scope>
</dependency>
+ <dependency>
+ 	<groupId>org.testcontainers</groupId>
+ 	<artifactId>postgresql</artifactId>
+ 	<version>${testcontainers_version}</version>
+ </dependency>
+ <dependency>
+ 	<groupId>org.testcontainers</groupId>
+ 	<artifactId>mssqlserver</artifactId>
+ 	<version>${testcontainers_version}</version>
+ </dependency>
+ <dependency>
+ 	<groupId>org.testcontainers</groupId>
+ 	<artifactId>oracle-xe</artifactId>
+ 	<version>${testcontainers_version}</version>
+ </dependency>
<dependency>
<groupId>org.testcontainers</groupId>
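These managed artifacts are what let downstream test modules spin up real databases for the dialect-specific integration tests. A hedged sketch of typical Testcontainers usage (the image tag and test wiring are assumptions, not taken from this build):

	// Start a throwaway Postgres and point a datasource at it
	PostgreSQLContainer<?> postgres = new PostgreSQLContainer<>("postgres:15");
	postgres.start();
	String url = postgres.getJdbcUrl();      // e.g. jdbc:postgresql://localhost:<mapped-port>/test
	String user = postgres.getUsername();
	String pass = postgres.getPassword();
	// ... run the IT suite against this connection, then ...
	postgres.stop();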
@ -2343,7 +2357,7 @@
<maxmem>2000m</maxmem>
<compilerArgs>
	<arg>-XDcompilePolicy=simple</arg>
-	<arg>-Xplugin:ErrorProne -Xep:MissingSummary:OFF</arg>
+	<arg>-Xplugin:ErrorProne -Xep:MissingSummary:OFF -XepExcludedPaths:.*/src/test/java/.*</arg>
	<arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED</arg>
	<arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED</arg>
	<arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED</arg>