diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/SleepUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/SleepUtil.java
new file mode 100644
index 00000000000..635bfae71bc
--- /dev/null
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/SleepUtil.java
@@ -0,0 +1,31 @@
+package ca.uhn.fhir.util;
+
+/**
+ * A utility class for thread sleeps.
+ * Uses non-static methods for easier mocking and to avoid unnecessary waits in unit tests
+ */
+public class SleepUtil {
+ private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SleepUtil.class);
+
+ public void sleepAtLeast(long theMillis) {
+ sleepAtLeast(theMillis, true);
+ }
+
+ @SuppressWarnings("BusyWait")
+ public void sleepAtLeast(long theMillis, boolean theLogProgress) {
+ long start = System.currentTimeMillis();
+ while (System.currentTimeMillis() <= start + theMillis) {
+ try {
+ long timeSinceStarted = System.currentTimeMillis() - start;
+ long timeToSleep = Math.max(0, theMillis - timeSinceStarted);
+ if (theLogProgress) {
+ ourLog.info("Sleeping for {}ms", timeToSleep);
+ }
+ Thread.sleep(timeToSleep);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ ourLog.error("Interrupted", e);
+ }
+ }
+ }
+}
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TestUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TestUtil.java
index 36ff4c3f536..32b725f8f4c 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TestUtil.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TestUtil.java
@@ -31,6 +31,9 @@ import static org.apache.commons.lang3.StringUtils.defaultString;
public class TestUtil {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TestUtil.class);
+
+ private static SleepUtil ourSleepUtil = new SleepUtil();
+
private static boolean ourShouldRandomizeTimezones = true;
public static void setShouldRandomizeTimezones(boolean theShouldRandomizeTimezones) {
@@ -135,25 +138,22 @@ public class TestUtil {
return stripReturns(theString).replace(" ", "");
}
+ /**
+ *
+ * In production code, prefer using an instance of SleepUtil instead of this static method.
+ * Since SleepUtil does not use static methods, it is easier to mock and avoids unnecessary
+ * waits in unit tests.
+ */
public static void sleepAtLeast(long theMillis) {
- sleepAtLeast(theMillis, true);
+ ourSleepUtil.sleepAtLeast(theMillis);
}
- @SuppressWarnings("BusyWait")
+ /**
+ * In production code, prefer using an instance of SleepUtil instead of this static method.
+ * Since SleepUtil does not use static methods, it is easier to mock and avoids unnecessary
+ * waits in unit tests.
+ */
public static void sleepAtLeast(long theMillis, boolean theLogProgress) {
- long start = System.currentTimeMillis();
- while (System.currentTimeMillis() <= start + theMillis) {
- try {
- long timeSinceStarted = System.currentTimeMillis() - start;
- long timeToSleep = Math.max(0, theMillis - timeSinceStarted);
- if (theLogProgress) {
- ourLog.info("Sleeping for {}ms", timeToSleep);
- }
- Thread.sleep(timeToSleep);
- } catch (InterruptedException e) {
- Thread.currentThread().interrupt();
- ourLog.error("Interrupted", e);
- }
- }
+ ourSleepUtil.sleepAtLeast(theMillis, theLogProgress);
}
}
diff --git a/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/SleepUtilTest.java b/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/SleepUtilTest.java
new file mode 100644
index 00000000000..fb6cd85788c
--- /dev/null
+++ b/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/SleepUtilTest.java
@@ -0,0 +1,29 @@
+package ca.uhn.fhir.util;
+
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+class SleepUtilTest {
+
+ @Test
+ public void testSleepAtLeast() {
+ SleepUtil sleepUtil = new SleepUtil();
+ long amountToSleepMs = 10;
+
+ long start = System.currentTimeMillis();
+ sleepUtil.sleepAtLeast(amountToSleepMs);
+ long stop = System.currentTimeMillis();
+
+ long actualSleepDurationMs = stop - start;
+ assertTrue(actualSleepDurationMs >= amountToSleepMs);
+ }
+
+ @Test
+ public void testZeroMs() {
+ // 0 is a valid input
+ SleepUtil sleepUtil = new SleepUtil();
+ sleepUtil.sleepAtLeast(0);
+ }
+
+}
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5553-deadlocks-on-mdm-clear.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5553-deadlocks-on-mdm-clear.yaml
new file mode 100644
index 00000000000..fe9f7a0bfd5
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5553-deadlocks-on-mdm-clear.yaml
@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 5553
+title: "mdm-clear jobs are prone to failing because of deadlocks when running on SQL Server. Such job failures have
+been mitigated to some extent by increasing the retries on deadlocks."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5555-avoid-resource-lob-column.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5555-avoid-resource-lob-column.yaml
new file mode 100644
index 00000000000..8ae0b2fab63
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5555-avoid-resource-lob-column.yaml
@@ -0,0 +1,10 @@
+---
+type: perf
+issue: 5555
+title: "Previously, resource body content went into one of 2 columns on the HFJ_RES_VER table:
+ RES_TEXT if the size was above a configurable threshold, or RES_TEXT_VC if it was below that
+ threshold. Performance testing has shown that the latter is always faster, and that on
+ Postgres the use of the former is particularly problematic since it maps to the
+ pg_largeobject table, which isn't the recommended way of storing high frequency objects.
+ The configurable threshold is now ignored, and the latter column is always used. Any legacy
+ data in the former column will still be read however."
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirCockroachDialect.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirCockroachDialect.java
similarity index 100%
rename from hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirCockroachDialect.java
rename to hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirCockroachDialect.java
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirDerbyDialect.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirDerbyDialect.java
similarity index 100%
rename from hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirDerbyDialect.java
rename to hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirDerbyDialect.java
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirH2Dialect.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirH2Dialect.java
similarity index 95%
rename from hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirH2Dialect.java
rename to hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirH2Dialect.java
index 5cdf1eaad26..12e2f9af6be 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirH2Dialect.java
+++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirH2Dialect.java
@@ -19,7 +19,6 @@
*/
package ca.uhn.fhir.jpa.model.dialect;
-import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.H2Dialect;
@@ -38,7 +37,7 @@ public class HapiFhirH2Dialect extends H2Dialect {
/**
* As of Hibernate 6, generated schemas include a column level check constraint that enforces valid values
- * for columns that back an Enum type. For example, the column definition for {@link ResourceTable#getFhirVersion()}
+ * for columns that back an Enum type. For example, the column definition for ResourceTable#getFhirVersion()
* would look like:
*
* RES_VERSION varchar(7) check (RES_VERSION in ('DSTU2','DSTU2_HL7ORG','DSTU2_1','DSTU3','R4','R4B','R5')), diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirMariaDBDialect.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirMariaDBDialect.java similarity index 100% rename from hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirMariaDBDialect.java rename to hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirMariaDBDialect.java diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirMySQLDialect.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirMySQLDialect.java similarity index 100% rename from hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirMySQLDialect.java rename to hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirMySQLDialect.java diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirOracleDialect.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirOracleDialect.java similarity index 100% rename from hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirOracleDialect.java rename to hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirOracleDialect.java diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirPostgres94Dialect.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirPostgres94Dialect.java similarity index 100% rename from hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirPostgres94Dialect.java rename to hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirPostgres94Dialect.java diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirPostgresDialect.java 
b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirPostgresDialect.java similarity index 100% rename from hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirPostgresDialect.java rename to hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirPostgresDialect.java diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirSQLServerDialect.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirSQLServerDialect.java similarity index 100% rename from hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirSQLServerDialect.java rename to hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirSQLServerDialect.java diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/HapiEntityManagerFactoryUtil.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/HapiEntityManagerFactoryUtil.java index 1a28f92db97..fe328bb5e1e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/HapiEntityManagerFactoryUtil.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/HapiEntityManagerFactoryUtil.java @@ -48,8 +48,10 @@ public final class HapiEntityManagerFactoryUtil { ConfigurableListableBeanFactory myConfigurableListableBeanFactory, FhirContext theFhirContext, JpaStorageSettings theStorageSettings) { + LocalContainerEntityManagerFactoryBean retVal = new HapiFhirLocalContainerEntityManagerFactoryBean(myConfigurableListableBeanFactory); + configureEntityManagerFactory(retVal, theFhirContext, theStorageSettings); return retVal; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java index 3e23911c980..90ac47f982d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java @@ -148,9 +148,7 @@ import org.springframework.transaction.support.TransactionSynchronization; import org.springframework.transaction.support.TransactionSynchronizationManager; import org.springframework.transaction.support.TransactionTemplate; -import java.nio.charset.StandardCharsets; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; @@ -645,7 +643,6 @@ public abstract class BaseHapiFhirDaoextends BaseStora theEntity.setResourceType(toResourceName(theResource)); } - byte[] resourceBinary; String resourceText; ResourceEncodingEnum encoding; boolean changed = false; @@ -662,7 +659,6 @@ public abstract class BaseHapiFhirDao extends BaseStora if (address != null) { encoding = ResourceEncodingEnum.ESR; - resourceBinary = null; resourceText = address.getProviderId() + ":" + address.getLocation(); changed = true; @@ -680,19 +676,9 @@ public abstract class BaseHapiFhirDao extends BaseStora theEntity.setFhirVersion(myContext.getVersion().getVersion()); HashFunction sha256 = Hashing.sha256(); - HashCode hashCode; - String encodedResource = encodeResource(theResource, encoding, excludeElements, myContext); - if (myStorageSettings.getInlineResourceTextBelowSize() > 0 - && encodedResource.length() < myStorageSettings.getInlineResourceTextBelowSize()) { - resourceText = encodedResource; - resourceBinary = null; - encoding = ResourceEncodingEnum.JSON; - hashCode = sha256.hashUnencodedChars(encodedResource); - } else { - resourceText = null; - resourceBinary = getResourceBinary(encoding, encodedResource); - hashCode = sha256.hashBytes(resourceBinary); - } + resourceText = encodeResource(theResource, encoding, excludeElements, myContext); + encoding = ResourceEncodingEnum.JSON; + HashCode hashCode = sha256.hashUnencodedChars(resourceText); String hashSha256 = hashCode.toString(); if 
(!hashSha256.equals(theEntity.getHashSha256())) { @@ -710,7 +696,6 @@ public abstract class BaseHapiFhirDao extends BaseStora } else { encoding = null; - resourceBinary = null; resourceText = null; } @@ -728,7 +713,6 @@ public abstract class BaseHapiFhirDao extends BaseStora changed = true; } - resourceBinary = null; resourceText = null; encoding = ResourceEncodingEnum.DEL; } @@ -753,46 +737,19 @@ public abstract class BaseHapiFhirDao extends BaseStora if (currentHistoryVersion == null || !currentHistoryVersion.hasResource()) { changed = true; } else { - changed = !Arrays.equals(currentHistoryVersion.getResource(), resourceBinary); + changed = !StringUtils.equals(currentHistoryVersion.getResourceTextVc(), resourceText); } } } EncodedResource retVal = new EncodedResource(); retVal.setEncoding(encoding); - retVal.setResourceBinary(resourceBinary); retVal.setResourceText(resourceText); retVal.setChanged(changed); return retVal; } - /** - * helper for returning the encoded byte array of the input resource string based on the encoding. - * - * @param encoding the encoding to used - * @param encodedResource the resource to encode - * @return byte array of the resource - */ - @Nonnull - private byte[] getResourceBinary(ResourceEncodingEnum encoding, String encodedResource) { - byte[] resourceBinary; - switch (encoding) { - case JSON: - resourceBinary = encodedResource.getBytes(StandardCharsets.UTF_8); - break; - case JSONC: - resourceBinary = GZipUtil.compress(encodedResource); - break; - default: - case DEL: - case ESR: - resourceBinary = new byte[0]; - break; - } - return resourceBinary; - } - /** * helper to format the meta element for serialization of the resource. 
* @@ -1437,8 +1394,7 @@ public abstract class BaseHapiFhirDao extends BaseStora List excludeElements = new ArrayList<>(8); getExcludedElements(historyEntity.getResourceType(), excludeElements, theResource.getMeta()); String encodedResourceString = encodeResource(theResource, encoding, excludeElements, myContext); - byte[] resourceBinary = getResourceBinary(encoding, encodedResourceString); - boolean changed = !Arrays.equals(historyEntity.getResource(), resourceBinary); + boolean changed = !StringUtils.equals(historyEntity.getResourceTextVc(), encodedResourceString); historyEntity.setUpdated(theTransactionDetails.getTransactionDate()); @@ -1450,19 +1406,14 @@ public abstract class BaseHapiFhirDao extends BaseStora return historyEntity; } - if (getStorageSettings().getInlineResourceTextBelowSize() > 0 - && encodedResourceString.length() < getStorageSettings().getInlineResourceTextBelowSize()) { - populateEncodedResource(encodedResource, encodedResourceString, null, ResourceEncodingEnum.JSON); - } else { - populateEncodedResource(encodedResource, null, resourceBinary, encoding); - } + populateEncodedResource(encodedResource, encodedResourceString, ResourceEncodingEnum.JSON); } + /* * Save the resource itself to the resourceHistoryTable */ historyEntity = myEntityManager.merge(historyEntity); historyEntity.setEncoding(encodedResource.getEncoding()); - historyEntity.setResource(encodedResource.getResourceBinary()); historyEntity.setResourceTextVc(encodedResource.getResourceText()); myResourceHistoryTableDao.save(historyEntity); @@ -1472,12 +1423,8 @@ public abstract class BaseHapiFhirDao extends BaseStora } private void populateEncodedResource( - EncodedResource encodedResource, - String encodedResourceString, - byte[] theResourceBinary, - ResourceEncodingEnum theEncoding) { + EncodedResource encodedResource, String encodedResourceString, ResourceEncodingEnum theEncoding) { encodedResource.setResourceText(encodedResourceString); - 
encodedResource.setResourceBinary(theResourceBinary); encodedResource.setEncoding(theEncoding); } @@ -1542,7 +1489,6 @@ public abstract class BaseHapiFhirDao extends BaseStora } historyEntry.setEncoding(theChanged.getEncoding()); - historyEntry.setResource(theChanged.getResourceBinary()); historyEntry.setResourceTextVc(theChanged.getResourceText()); ourLog.debug("Saving history entry ID[{}] for RES_ID[{}]", historyEntry.getId(), historyEntry.getResourceId()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java index dbb35bb4967..7120599b146 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java @@ -1689,19 +1689,17 @@ public abstract class BaseHapiFhirResourceDao extends B if (historyEntity.getEncoding() == ResourceEncodingEnum.JSONC || historyEntity.getEncoding() == ResourceEncodingEnum.JSON) { byte[] resourceBytes = historyEntity.getResource(); + + // Always migrate data out of the bytes column if (resourceBytes != null) { String resourceText = decodeResource(resourceBytes, historyEntity.getEncoding()); - if (myStorageSettings.getInlineResourceTextBelowSize() > 0 - && resourceText.length() < myStorageSettings.getInlineResourceTextBelowSize()) { - ourLog.debug( - "Storing text of resource {} version {} as inline VARCHAR", - entity.getResourceId(), - historyEntity.getVersion()); - historyEntity.setResourceTextVc(resourceText); - historyEntity.setResource(null); - historyEntity.setEncoding(ResourceEncodingEnum.JSON); - changed = true; - } + ourLog.debug( + "Storing text of resource {} version {} as inline VARCHAR", + entity.getResourceId(), + historyEntity.getVersion()); + historyEntity.setResourceTextVc(resourceText); + historyEntity.setEncoding(ResourceEncodingEnum.JSON); + changed = true; 
} } if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) { @@ -2071,6 +2069,7 @@ public abstract class BaseHapiFhirResourceDao extends B }); } + @Override public > Stream searchForIdStream( SearchParameterMap theParams, RequestDetails theRequest, diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/EncodedResource.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/EncodedResource.java index 7695b44ca13..503a85b15d1 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/EncodedResource.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/EncodedResource.java @@ -24,7 +24,6 @@ import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum; class EncodedResource { private boolean myChanged; - private byte[] myResource; private ResourceEncodingEnum myEncoding; private String myResourceText; @@ -36,14 +35,6 @@ class EncodedResource { myEncoding = theEncoding; } - public byte[] getResourceBinary() { - return myResource; - } - - public void setResourceBinary(byte[] theResource) { - myResource = theResource; - } - public boolean isChanged() { return myChanged; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java index b74a1b757f2..00eeccb7345 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java @@ -79,4 +79,16 @@ public interface IResourceHistoryTableDao extends JpaRepository RES_TEXT_VC column to the legacy RES_TEXT
column, which is where data may have + * been stored by versions of HAPI FHIR prior to 7.0.0 + * + * @since 7.0.0 + */ + @Modifying + @Query( + "UPDATE ResourceHistoryTable r SET r.myResourceTextVc = null, r.myResource = :text, r.myEncoding = 'JSONC' WHERE r.myId = :pid") + void updateNonInlinedContents(@Param("text") byte[] theText, @Param("pid") long thePid); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java index 12cf705407a..dcc7c9b9c6d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java @@ -1555,11 +1555,12 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks{ Builder.BuilderWithTableName nrmlTable = version.onTable("HFJ_SPIDX_QUANTITY_NRML"); nrmlTable.addColumn("20210111.1", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); nrmlTable.addColumn("20210111.2", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); - // - The fk name is generated from Hibernate, have to use this name here + // Disabled - superceded by 20220304.33 nrmlTable .addForeignKey("20210111.3", "FKRCJOVMUH5KC0O6FVBLE319PYV") .toColumn("RES_ID") - .references("HFJ_RESOURCE", "RES_ID"); + .references("HFJ_RESOURCE", "RES_ID") + .doNothing(); Builder.BuilderWithTableName quantityTable = version.onTable("HFJ_SPIDX_QUANTITY"); quantityTable diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/entity/GeneratedSchemaTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/entity/GeneratedSchemaTest.java new file mode 100644 index 00000000000..bd10ad84d05 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/entity/GeneratedSchemaTest.java @@ -0,0 +1,40 @@ +package ca.uhn.fhir.jpa.entity; + +import 
ca.uhn.fhir.util.ClasspathUtil; +import org.apache.commons.lang3.StringUtils; +import org.junit.jupiter.api.Test; + +import java.util.Arrays; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.MatcherAssert.assertThat; + +public class GeneratedSchemaTest { + + /** + * Make sure that the RES_TEXT_VC column, which is supposed to be an unlimited-length + * string datatype, actually uses an appropriate datatype on the various databases + * we care about. + */ + @Test + public void testVerifyLongVarcharColumnDefinition() { + validateLongVarcharDatatype("cockroachdb.sql", "varchar(2147483647)"); + validateLongVarcharDatatype("derby.sql", "clob"); + validateLongVarcharDatatype("mysql.sql", "longtext"); + validateLongVarcharDatatype("mariadb.sql", "longtext"); + + validateLongVarcharDatatype("h2.sql", "clob"); + validateLongVarcharDatatype("postgres.sql", "text"); + validateLongVarcharDatatype("oracle.sql", "clob"); + validateLongVarcharDatatype("sqlserver.sql", "varchar(max)"); + + } + + private static void validateLongVarcharDatatype(String schemaName, String expectedDatatype) { + String schema = ClasspathUtil.loadResource("ca/uhn/hapi/fhir/jpa/docs/database/" + schemaName); + String[] lines = StringUtils.split(schema, '\n'); + String resTextVc = Arrays.stream(lines).filter(t -> t.contains("RES_TEXT_VC ")).findFirst().orElseThrow(); + assertThat("Wrong type in " + schemaName, resTextVc, containsString("RES_TEXT_VC " + expectedDatatype)); + } + +} diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java index 7e951765439..0d1e0f8adaa 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java @@ -25,14 +25,15 @@ import 
ca.uhn.fhir.rest.api.Constants; import jakarta.persistence.*; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import org.hibernate.annotations.JdbcTypeCode; +import org.hibernate.Length; import org.hibernate.annotations.OptimisticLock; -import org.hibernate.type.SqlTypes; import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; +import static org.apache.commons.lang3.StringUtils.defaultString; + @Entity @Table( name = ResourceHistoryTable.HFJ_RES_VER, @@ -57,7 +58,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl public static final int ENCODING_COL_LENGTH = 5; public static final String HFJ_RES_VER = "HFJ_RES_VER"; - public static final int RES_TEXT_VC_MAX_LENGTH = 4000; private static final long serialVersionUID = 1L; @Id @@ -86,13 +86,15 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl @OneToMany(mappedBy = "myResourceHistory", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true) private Collection myTags; + /** + * Note: No setter for this field because it's only a legacy way of storing data now. 
+ */ @Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = true) @Lob() @OptimisticLock(excluded = true) private byte[] myResource; - @Column(name = "RES_TEXT_VC", length = RES_TEXT_VC_MAX_LENGTH, nullable = true) - @JdbcTypeCode(SqlTypes.LONG32VARCHAR) + @Column(name = "RES_TEXT_VC", nullable = true, length = Length.LONG32) @OptimisticLock(excluded = true) private String myResourceTextVc; @@ -153,7 +155,8 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl } public void setResourceTextVc(String theResourceTextVc) { - myResourceTextVc = theResourceTextVc; + myResource = null; + myResourceTextVc = defaultString(theResourceTextVc); } public ResourceHistoryProvenanceEntity getProvenance() { @@ -209,10 +212,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl return myResource; } - public void setResource(byte[] theResource) { - myResource = theResource; - } - @Override public Long getResourceId() { return myResourceId; diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/imprt2/ConsumeFilesStepR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/imprt2/ConsumeFilesStepR4Test.java index d9208ba6ab2..5462f896a3c 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/imprt2/ConsumeFilesStepR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/imprt2/ConsumeFilesStepR4Test.java @@ -41,14 +41,12 @@ public class ConsumeFilesStepR4Test extends BasePartitioningR4Test { public void before() throws Exception { super.before(); myPartitionSettings.setPartitioningEnabled(false); - myStorageSettings.setInlineResourceTextBelowSize(10000); } @AfterEach @Override public void after() { super.after(); - myStorageSettings.setInlineResourceTextBelowSize(new JpaStorageSettings().getInlineResourceTextBelowSize()); } @Test diff --git 
a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CreateTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CreateTest.java index 1fe09812342..db30329cbad 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CreateTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CreateTest.java @@ -91,7 +91,6 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test { myStorageSettings.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED); myStorageSettings.setIndexOnContainedResources(new JpaStorageSettings().isIndexOnContainedResources()); myStorageSettings.setIndexOnContainedResourcesRecursively(new JpaStorageSettings().isIndexOnContainedResourcesRecursively()); - myStorageSettings.setInlineResourceTextBelowSize(new JpaStorageSettings().getInlineResourceTextBelowSize()); } @Test diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4InlineResourceModeTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4InlineResourceModeTest.java index 368519202f8..b37be63bfb5 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4InlineResourceModeTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4InlineResourceModeTest.java @@ -1,102 +1,61 @@ package ca.uhn.fhir.jpa.dao.r4; -import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; -import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; +import ca.uhn.fhir.jpa.dao.GZipUtil; +import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; +import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum; import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.test.BaseJpaR4Test; -import 
org.apache.commons.lang3.StringUtils; -import org.hl7.fhir.r4.model.IdType; +import ca.uhn.fhir.rest.param.DateRangeParam; +import ca.uhn.fhir.rest.param.HistorySearchDateRangeParam; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.Patient; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.HashMap; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.containsString; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; public class FhirResourceDaoR4InlineResourceModeTest extends BaseJpaR4Test { - @BeforeEach - public void beforeSetDao() { - myStorageSettings.setInlineResourceTextBelowSize(5000); - } - - @AfterEach - public void afterResetDao() { - myStorageSettings.setInlineResourceTextBelowSize(new JpaStorageSettings().getInlineResourceTextBelowSize()); - } - @Test - public void testCreateWithInlineResourceTextStorage() { - Patient patient = new Patient(); - patient.setActive(true); - Long resourceId = myPatientDao.create(patient).getId().getIdPartAsLong(); + public void testRetrieveNonInlinedResource() { + IIdType id = createPatient(withActiveTrue()); + Long pid = id.getIdPartAsLong(); - patient = new Patient(); - patient.setId("Patient/" + resourceId); - patient.setActive(false); - myPatientDao.update(patient); + relocateResourceTextToCompressedColumn(pid, 1L); - runInTransaction(() -> { - // Version 1 - ResourceHistoryTable entity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(resourceId, 1); - assertNull(entity.getResource()); - 
assertThat(entity.getResourceTextVc(), containsString("\"active\":true")); - // Version 2 - entity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(resourceId, 2); - assertNull(entity.getResource()); - assertThat(entity.getResourceTextVc(), containsString("\"active\":false")); + runInTransaction(()->{ + ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(pid, 1); + assertNotNull(historyEntity.getResource()); + assertNull(historyEntity.getResourceTextVc()); + assertEquals(ResourceEncodingEnum.JSONC, historyEntity.getEncoding()); }); - patient = myPatientDao.read(new IdType("Patient/" + resourceId)); - assertFalse(patient.getActive()); + // Read + validatePatient(myPatientDao.read(id.withVersion(null), mySrd)); - patient = (Patient) myPatientDao.search(SearchParameterMap.newSynchronous()).getAllResources().get(0); - assertFalse(patient.getActive()); + // VRead + validatePatient(myPatientDao.read(id.withVersion("1"), mySrd)); + // Search (Sync) + validatePatient(myPatientDao.search(SearchParameterMap.newSynchronous(), mySrd).getResources(0, 1).get(0)); + + // Search (Async) + validatePatient(myPatientDao.search(new SearchParameterMap(), mySrd).getResources(0, 1).get(0)); + + // History + validatePatient(myPatientDao.history(id, new HistorySearchDateRangeParam(new HashMap<>(), new DateRangeParam(), 0), mySrd).getResources(0, 1).get(0)); } - @Test - public void testDontUseInlineAboveThreshold() { - String veryLongFamilyName = StringUtils.leftPad("", 6000, 'a'); - - Patient patient = new Patient(); - patient.setActive(true); - patient.addName().setFamily(veryLongFamilyName); - Long resourceId = myPatientDao.create(patient).getId().getIdPartAsLong(); - - runInTransaction(() -> { - // Version 1 - ResourceHistoryTable entity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(resourceId, 1); - assertNotNull(entity.getResource()); - assertNull(entity.getResourceTextVc()); - }); - - patient = 
myPatientDao.read(new IdType("Patient/" + resourceId)); - assertEquals(veryLongFamilyName, patient.getNameFirstRep().getFamily()); - } - - - @Test - public void testNopOnUnchangedUpdate() { - Patient patient = new Patient(); - patient.setActive(true); - Long resourceId = myPatientDao.create(patient).getId().getIdPartAsLong(); - - patient = new Patient(); - patient.setId("Patient/" + resourceId); - patient.setActive(true); - DaoMethodOutcome updateOutcome = myPatientDao.update(patient); - assertEquals("1", updateOutcome.getId().getVersionIdPart()); - assertTrue(updateOutcome.isNop()); - + private void validatePatient(IBaseResource theRead) { + assertTrue(((Patient)theRead).getActive()); } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java index fb828747880..a65e6075ca3 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java @@ -1031,26 +1031,15 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test @ParameterizedTest @CsvSource({ - // NoOp OptimisticLock OptimizeMode ExpectedSelect ExpectedUpdate - " false, false, CURRENT_VERSION, 2, 1", - " true, false, CURRENT_VERSION, 2, 0", - " false, true, CURRENT_VERSION, 12, 1", - " true, true, CURRENT_VERSION, 12, 0", - " false, false, ALL_VERSIONS, 12, 10", - " true, false, ALL_VERSIONS, 12, 0", - " false, true, ALL_VERSIONS, 22, 10", - " true, true, ALL_VERSIONS, 22, 0", + // OptimisticLock OptimizeMode ExpectedSelect ExpectedUpdate + " false, CURRENT_VERSION, 2, 0", + " true, CURRENT_VERSION, 12, 0", + " false, ALL_VERSIONS, 12, 0", + " true, ALL_VERSIONS, 22, 0", }) - public void testReindexJob_OptimizeStorage(boolean theNoOp, boolean theOptimisticLock, 
ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageModeEnum, int theExpectedSelectCount, int theExpectedUpdateCount) { + public void testReindexJob_OptimizeStorage(boolean theOptimisticLock, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageModeEnum, int theExpectedSelectCount, int theExpectedUpdateCount) { // Setup - // In no-op mode, the inlining is already in the state it needs to be in - if (theNoOp) { - myStorageSettings.setInlineResourceTextBelowSize(10000); - } else { - myStorageSettings.setInlineResourceTextBelowSize(0); - } - ResourceIdListWorkChunkJson data = new ResourceIdListWorkChunkJson(); IIdType patientId = createPatient(withActiveTrue()); for (int i = 0; i < 10; i++) { diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java index 569ff1aad7c..649359d3937 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java @@ -274,7 +274,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test { ResourceHistoryTable newHistory = table.toHistory(true); ResourceHistoryTable currentHistory = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(table.getId(), 1L); newHistory.setEncoding(currentHistory.getEncoding()); - newHistory.setResource(currentHistory.getResource()); + newHistory.setResourceTextVc(currentHistory.getResourceTextVc()); myResourceHistoryTableDao.save(newHistory); }); @@ -2928,7 +2928,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test { ResourceHistoryTable table = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 1L); String newContent = myFhirContext.newJsonParser().encodeResourceToString(p); newContent = newContent.replace("male", "foo"); - 
table.setResource(newContent.getBytes(Charsets.UTF_8)); + table.setResourceTextVc(newContent); table.setEncoding(ResourceEncodingEnum.JSON); myResourceHistoryTableDao.save(table); } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java index 9d48781b40b..93dc9dfc6e3 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java @@ -620,11 +620,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { template.execute((TransactionCallback ) t -> { ResourceHistoryTable resourceHistoryTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), id.getVersionIdPartAsLong()); resourceHistoryTable.setEncoding(ResourceEncodingEnum.JSON); - try { - resourceHistoryTable.setResource("{\"resourceType\":\"FOO\"}".getBytes("UTF-8")); - } catch (UnsupportedEncodingException e) { - throw new Error(e); - } + resourceHistoryTable.setResourceTextVc("{\"resourceType\":\"FOO\"}"); myResourceHistoryTableDao.save(resourceHistoryTable); ResourceTable table = myResourceTableDao.findById(id.getIdPartAsLong()).orElseThrow(IllegalStateException::new); @@ -1917,11 +1913,11 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest { Patient p = new Patient(); p.addIdentifier().setSystem("urn:system").setValue(methodName); - myPatientDao.create(p, mySrd).getId(); + myPatientDao.create(p, mySrd); p = new Patient(); p.addIdentifier().setSystem("urn:system").setValue(methodName); - myPatientDao.create(p, mySrd).getId(); + myPatientDao.create(p, mySrd); Observation o = new Observation(); o.getCode().setText("Some Observation"); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/delete/job/ReindexJobTest.java 
b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/delete/job/ReindexJobTest.java index b53ac1ef305..77cbd0d21ce 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/delete/job/ReindexJobTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/delete/job/ReindexJobTest.java @@ -87,6 +87,11 @@ public class ReindexJobTest extends BaseJpaR4Test { createPatient(withActiveTrue()); } + // Move resource text to compressed storage, which we don't write to anymore but legacy + // data may exist that was previously stored there, so we're simulating that. + List allHistoryEntities = runInTransaction(() -> myResourceHistoryTableDao.findAll()); + allHistoryEntities.forEach(t->relocateResourceTextToCompressedColumn(t.getResourceId(), t.getVersion())); + runInTransaction(()->{ assertEquals(20, myResourceHistoryTableDao.count()); for (ResourceHistoryTable history : myResourceHistoryTableDao.findAll()) { @@ -141,6 +146,11 @@ public class ReindexJobTest extends BaseJpaR4Test { createPatient(withActiveTrue()); } + // Move resource text to compressed storage, which we don't write to anymore but legacy + // data may exist that was previously stored there, so we're simulating that. 
+ List allHistoryEntities = runInTransaction(() -> myResourceHistoryTableDao.findAll()); + allHistoryEntities.forEach(t->relocateResourceTextToCompressedColumn(t.getResourceId(), t.getVersion())); + runInTransaction(()->{ assertEquals(20, myResourceHistoryTableDao.count()); for (ResourceHistoryTable history : myResourceHistoryTableDao.findAll()) { @@ -149,8 +159,6 @@ public class ReindexJobTest extends BaseJpaR4Test { } }); - myStorageSettings.setInlineResourceTextBelowSize(10000); - // execute JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInvalidDataR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInvalidDataR4Test.java index 52a3e23accf..b16956171b9 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInvalidDataR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInvalidDataR4Test.java @@ -38,11 +38,9 @@ public class ResourceProviderInvalidDataR4Test extends BaseResourceProviderR4Tes // Manually set the value to be an invalid decimal number runInTransaction(() -> { ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id, 1); - byte[] bytesCompressed = resVer.getResource(); - String resourceText = GZipUtil.decompress(bytesCompressed); + String resourceText = resVer.getResourceTextVc(); resourceText = resourceText.replace("100", "-.100"); - bytesCompressed = GZipUtil.compress(resourceText); - resVer.setResource(bytesCompressed); + resVer.setResourceTextVc(resourceText); myResourceHistoryTableDao.save(resVer); }); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java 
b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java index 837a41b68a2..6c28d39348a 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java @@ -54,6 +54,7 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor; import ca.uhn.fhir.util.ClasspathUtil; import ca.uhn.fhir.util.StopWatch; +import ca.uhn.fhir.util.TestUtil; import ca.uhn.fhir.util.UrlUtil; import com.google.common.base.Charsets; import com.google.common.collect.Lists; @@ -6723,7 +6724,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test { // Update Patient after delay int delayInMs = 1000; - TimeUnit.MILLISECONDS.sleep(delayInMs); + TestUtil.sleepAtLeast(delayInMs + 100); patient.getNameFirstRep().addGiven("Bob"); myClient.update().resource(patient).execute(); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java index 67aeca52c42..ec9d5d8a4d5 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java @@ -397,6 +397,11 @@ public class GiantTransactionPerfTest { throw new UnsupportedOperationException(); } + @Override + public void updateNonInlinedContents(byte[] theText, long thePid) { + throw new UnsupportedOperationException(); + } + @Nonnull @Override public List findAll() { diff --git a/hapi-fhir-jpaserver-test-r5/pom.xml b/hapi-fhir-jpaserver-test-r5/pom.xml index df1d9fc41e3..eb2a7a64796 100644 --- a/hapi-fhir-jpaserver-test-r5/pom.xml +++ 
b/hapi-fhir-jpaserver-test-r5/pom.xml @@ -34,6 +34,20 @@ test + ++ +junit +junit +4.13.2 +provided ++ ++ +org.hamcrest +hamcrest-core +diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/database/BaseDatabaseVerificationIT.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/database/BaseDatabaseVerificationIT.java new file mode 100644 index 00000000000..bfc75f33643 --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/database/BaseDatabaseVerificationIT.java @@ -0,0 +1,148 @@ +package ca.uhn.fhir.jpa.dao.r5.database; + +import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; +import ca.uhn.fhir.jpa.embedded.JpaEmbeddedDatabase; +import ca.uhn.fhir.jpa.migrate.HapiMigrationStorageSvc; +import ca.uhn.fhir.jpa.migrate.MigrationTaskList; +import ca.uhn.fhir.jpa.migrate.SchemaMigrator; +import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao; +import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks; +import ca.uhn.fhir.jpa.test.config.TestR5Config; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; +import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException; +import ca.uhn.fhir.util.VersionEnum; +import jakarta.persistence.EntityManagerFactory; +import org.apache.commons.lang3.StringUtils; +import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.r5.model.Patient; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.envers.repository.support.EnversRevisionRepositoryFactoryBean; +import org.springframework.data.jpa.repository.config.EnableJpaRepositories; +import 
org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import javax.sql.DataSource; +import java.util.Properties; +import java.util.Set; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +@ExtendWith(SpringExtension.class) +@EnableJpaRepositories(repositoryFactoryBeanClass = EnversRevisionRepositoryFactoryBean.class) +@ContextConfiguration(classes = {BaseDatabaseVerificationIT.TestConfig.class}) +public abstract class BaseDatabaseVerificationIT { + private static final Logger ourLog = LoggerFactory.getLogger(BaseDatabaseVerificationIT.class); + private static final String MIGRATION_TABLENAME = "MIGRATIONS"; + + @Autowired + EntityManagerFactory myEntityManagerFactory; + + @Autowired + JpaEmbeddedDatabase myJpaEmbeddedDatabase; + + @Autowired + IFhirResourceDao myPatientDao; + + + @ParameterizedTest + @ValueSource(ints = {10, 100000}) + public void testCreateRead(int theSize) { + String name = StringUtils.leftPad("", theSize, "a"); + + Patient patient = new Patient(); + patient.setActive(true); + patient.addName().setFamily(name); + IIdType id = myPatientDao.create(patient, new SystemRequestDetails()).getId(); + + Patient actual = myPatientDao.read(id, new SystemRequestDetails()); + assertEquals(name, actual.getName().get(0).getFamily()); + } + + + @Test + public void testDelete() { + Patient patient = new Patient(); + patient.setActive(true); + IIdType id = myPatientDao.create(patient, new SystemRequestDetails()).getId().toUnqualifiedVersionless(); + + myPatientDao.delete(id, new SystemRequestDetails()); + + assertThrows(ResourceGoneException.class, () -> myPatientDao.read(id, new SystemRequestDetails())); + } + + + @Configuration + public static class TestConfig extends TestR5Config { + + @Autowired + private JpaDatabaseContextConfigParamObject myJpaDatabaseContextConfigParamObject; + + @Override + @Bean + public 
DataSource dataSource() { + DataSource dataSource = myJpaDatabaseContextConfigParamObject.getJpaEmbeddedDatabase().getDataSource(); + + HapiMigrationDao hapiMigrationDao = new HapiMigrationDao(dataSource, myJpaDatabaseContextConfigParamObject.getJpaEmbeddedDatabase().getDriverType(), MIGRATION_TABLENAME); + HapiMigrationStorageSvc hapiMigrationStorageSvc = new HapiMigrationStorageSvc(hapiMigrationDao); + + MigrationTaskList tasks = new HapiFhirJpaMigrationTasks(Set.of()).getAllTasks(VersionEnum.values()); + + SchemaMigrator schemaMigrator = new SchemaMigrator( + "HAPI FHIR", MIGRATION_TABLENAME, dataSource, new Properties(), tasks, hapiMigrationStorageSvc); + schemaMigrator.setDriverType(myJpaDatabaseContextConfigParamObject.getJpaEmbeddedDatabase().getDriverType()); + + ourLog.info("About to run migration..."); + schemaMigrator.createMigrationTableIfRequired(); + schemaMigrator.migrate(); + ourLog.info("Migration complete"); + + + return dataSource; + } + + @Bean + public JpaEmbeddedDatabase jpaEmbeddedDatabase(JpaDatabaseContextConfigParamObject theJpaDatabaseContextConfigParamObject) { + return theJpaDatabaseContextConfigParamObject.getJpaEmbeddedDatabase(); + } + + @Override + protected Properties jpaProperties() { + Properties retVal = super.jpaProperties(); + retVal.put("hibernate.hbm2ddl.auto", "none"); + retVal.put("hibernate.dialect", myJpaDatabaseContextConfigParamObject.getDialect()); + return retVal; + } + + } + + public static class JpaDatabaseContextConfigParamObject { + private JpaEmbeddedDatabase myJpaEmbeddedDatabase; + private String myDialect; + + public JpaDatabaseContextConfigParamObject(JpaEmbeddedDatabase theJpaEmbeddedDatabase, String theDialect) { + myJpaEmbeddedDatabase = theJpaEmbeddedDatabase; + myDialect = theDialect; + } + + public JpaEmbeddedDatabase getJpaEmbeddedDatabase() { + return myJpaEmbeddedDatabase; + } + + public String getDialect() { + return myDialect; + } + } + + +} + + diff --git 
a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/database/DatabaseVerificationWithMsSqlIT.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/database/DatabaseVerificationWithMsSqlIT.java new file mode 100644 index 00000000000..a6cf07f394d --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/database/DatabaseVerificationWithMsSqlIT.java @@ -0,0 +1,27 @@ +package ca.uhn.fhir.jpa.dao.r5.database; + +import ca.uhn.fhir.jpa.embedded.MsSqlEmbeddedDatabase; +import ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect; +import ca.uhn.fhir.jpa.model.dialect.HapiFhirSQLServerDialect; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.ContextConfiguration; + +@ContextConfiguration(classes = { + DatabaseVerificationWithMsSqlIT.TestConfig.class +}) +public class DatabaseVerificationWithMsSqlIT extends BaseDatabaseVerificationIT { + + @Configuration + public static class TestConfig { + @Bean + public JpaDatabaseContextConfigParamObject jpaDatabaseParamObject() { + return new JpaDatabaseContextConfigParamObject( + new MsSqlEmbeddedDatabase(), + HapiFhirSQLServerDialect.class.getName() + ); + } + } + + +} diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/database/DatabaseVerificationWithOracleIT.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/database/DatabaseVerificationWithOracleIT.java new file mode 100644 index 00000000000..d2c72605d6c --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/database/DatabaseVerificationWithOracleIT.java @@ -0,0 +1,26 @@ +package ca.uhn.fhir.jpa.dao.r5.database; + +import ca.uhn.fhir.jpa.embedded.OracleEmbeddedDatabase; +import ca.uhn.fhir.jpa.model.dialect.HapiFhirOracleDialect; +import org.springframework.context.annotation.Bean; +import 
org.springframework.context.annotation.Configuration; +import org.springframework.test.context.ContextConfiguration; + +@ContextConfiguration(classes = { + DatabaseVerificationWithOracleIT.TestConfig.class +}) +public class DatabaseVerificationWithOracleIT extends BaseDatabaseVerificationIT { + + @Configuration + public static class TestConfig { + @Bean + public JpaDatabaseContextConfigParamObject jpaDatabaseParamObject(){ + return new JpaDatabaseContextConfigParamObject( + new OracleEmbeddedDatabase(), + HapiFhirOracleDialect.class.getName() + ); + } + } + + +} diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/database/DatabaseVerificationWithPostgresIT.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/database/DatabaseVerificationWithPostgresIT.java new file mode 100644 index 00000000000..7bf2a5712b9 --- /dev/null +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/database/DatabaseVerificationWithPostgresIT.java @@ -0,0 +1,26 @@ +package ca.uhn.fhir.jpa.dao.r5.database; + +import ca.uhn.fhir.jpa.embedded.PostgresEmbeddedDatabase; +import ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.ContextConfiguration; + +@ContextConfiguration(classes = { + DatabaseVerificationWithPostgresIT.TestConfig.class +}) +public class DatabaseVerificationWithPostgresIT extends BaseDatabaseVerificationIT { + + @Configuration + public static class TestConfig { + @Bean + public JpaDatabaseContextConfigParamObject jpaDatabaseParamObject() { + return new JpaDatabaseContextConfigParamObject( + new PostgresEmbeddedDatabase(), + HapiFhirPostgresDialect.class.getName() + ); + } + } + + +} diff --git a/hapi-fhir-jpaserver-test-utilities/ResourceTable/segments_1 b/hapi-fhir-jpaserver-test-utilities/ResourceTable/segments_1 new file mode 100644 index 
00000000000..45ec119a611 Binary files /dev/null and b/hapi-fhir-jpaserver-test-utilities/ResourceTable/segments_1 differ diff --git a/hapi-fhir-jpaserver-test-utilities/ResourceTable/write.lock b/hapi-fhir-jpaserver-test-utilities/ResourceTable/write.lock new file mode 100644 index 00000000000..e69de29bb2d diff --git a/hapi-fhir-jpaserver-test-utilities/TermConcept/segments_1 b/hapi-fhir-jpaserver-test-utilities/TermConcept/segments_1 new file mode 100644 index 00000000000..bebbd510042 Binary files /dev/null and b/hapi-fhir-jpaserver-test-utilities/TermConcept/segments_1 differ diff --git a/hapi-fhir-jpaserver-test-utilities/TermConcept/write.lock b/hapi-fhir-jpaserver-test-utilities/TermConcept/write.lock new file mode 100644 index 00000000000..e69de29bb2d diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml index 9a2166c22ea..225aae4f29f 100644 --- a/hapi-fhir-jpaserver-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-test-utilities/pom.xml @@ -131,20 +131,14 @@ org.testcontainers postgresql -1.17.6 -compile org.testcontainers mssqlserver -1.17.6 -compile org.testcontainers oracle-xe -1.17.6 -compile org.postgresql diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/JpaEmbeddedDatabase.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/JpaEmbeddedDatabase.java index ae26426bdee..f87d1d25d14 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/JpaEmbeddedDatabase.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/JpaEmbeddedDatabase.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.embedded; import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; +import jakarta.annotation.PreDestroy; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -53,6 +54,7 @@ public abstract class JpaEmbeddedDatabase { private JdbcTemplate myJdbcTemplate; private 
Connection myConnection; + @PreDestroy public abstract void stop(); public abstract void disableConstraints(); @@ -116,7 +118,7 @@ public abstract class JpaEmbeddedDatabase { for (String sql : theStatements) { if (!StringUtils.isBlank(sql)) { statement.addBatch(sql); - ourLog.info("Added to batch: {}", sql); + ourLog.debug("Added to batch: {}", sql); } } statement.executeBatch(); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java index 68813b7c4d5..9515d61bbe7 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java @@ -40,6 +40,7 @@ import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor; import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider; import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper; +import ca.uhn.fhir.jpa.dao.GZipUtil; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository; import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository; @@ -82,6 +83,7 @@ import ca.uhn.fhir.jpa.entity.TermValueSet; import ca.uhn.fhir.jpa.entity.TermValueSetConcept; import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor; import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.jpa.packages.IPackageInstallerSvc; import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc; import ca.uhn.fhir.jpa.provider.JpaSystemProvider; @@ -663,6 +665,14 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil return myTxManager; } + protected void relocateResourceTextToCompressedColumn(Long theResourcePid, Long theVersion) { + runInTransaction(()->{ + 
ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theResourcePid, theVersion); + byte[] contents = GZipUtil.compress(historyEntity.getResourceTextVc()); + myResourceHistoryTableDao.updateNonInlinedContents(contents, historyEntity.getId()); + }); + } + protected ValidationResult validateWithResult(IBaseResource theResource) { FhirValidator validatorModule = myFhirContext.newValidator(); FhirInstanceValidator instanceValidator = new FhirInstanceValidator(myValidationSupport); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java index 10ca0859722..828bd153d1c 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java @@ -186,7 +186,7 @@ public class TestR5Config { return retVal; } - private Properties jpaProperties() { + protected Properties jpaProperties() { Properties extraProperties = new Properties(); extraProperties.put("hibernate.format_sql", "false"); extraProperties.put("hibernate.show_sql", "false"); diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeEnum.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeEnum.java index 8c894e824c6..d1306fbe103 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeEnum.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeEnum.java @@ -34,7 +34,7 @@ public enum ColumnTypeEnum { /** * Unlimited length text, with a column definition containing the annotation: - *@JdbcTypeCode(SqlTypes.LONG32VARCHAR)
+ *@Column(length=Integer.MAX_VALUE)
*/ TEXT, BIG_DECIMAL; diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeToDriverTypeToSqlType.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeToDriverTypeToSqlType.java index 97ec7c8c673..f64c34554c0 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeToDriverTypeToSqlType.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeToDriverTypeToSqlType.java @@ -62,7 +62,7 @@ public final class ColumnTypeToDriverTypeToSqlType { setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.MYSQL_5_7, "double precision"); setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.MSSQL_2012, "double precision"); setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.ORACLE_12C, "double precision"); - setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.POSTGRES_9_4, "float8"); + setColumnType(ColumnTypeEnum.DOUBLE, DriverTypeEnum.POSTGRES_9_4, "double precision"); setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.H2_EMBEDDED, "bigint"); setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.DERBY_EMBEDDED, "bigint"); @@ -123,7 +123,7 @@ public final class ColumnTypeToDriverTypeToSqlType { "oid"); // the PG driver will write oid into a `text` column setColumnType(ColumnTypeEnum.CLOB, DriverTypeEnum.MSSQL_2012, "varchar(MAX)"); - setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.H2_EMBEDDED, "character large object"); + setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.H2_EMBEDDED, "clob"); setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.DERBY_EMBEDDED, "clob"); setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.MARIADB_10_1, "longtext"); setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.MYSQL_5_7, "longtext"); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java index 53faa3d2c6c..44f05007c05 100644 --- 
a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java @@ -267,14 +267,6 @@ public class JpaStorageSettings extends StorageSettings { * @since 5.6.0 */ private boolean myAdvancedHSearchIndexing = false; - /** - * If set to a positive number, any resources with a character length at or below the given number - * of characters will be stored inline in theHFJ_RES_VER
table instead of using a - * separate LOB column. - * - * @since 5.7.0 - */ - private int myInlineResourceTextBelowSize = 0; /** * @since 5.7.0 @@ -381,25 +373,21 @@ public class JpaStorageSettings extends StorageSettings { } /** - * If set to a positive number, any resources with a character length at or below the given number - * of characters will be stored inline in theHFJ_RES_VER
table instead of using a - * separate LOB column. - * * @since 5.7.0 + * @deprecated This setting no longer does anything as of HAPI FHIR 7.0.0 */ + @Deprecated public int getInlineResourceTextBelowSize() { - return myInlineResourceTextBelowSize; + return 0; } /** - * If set to a positive number, any resources with a character length at or below the given number - * of characters will be stored inline in theHFJ_RES_VER
table instead of using a - * separate LOB column. - * * @since 5.7.0 + * @deprecated This setting no longer does anything as of HAPI FHIR 7.0.0 */ + @Deprecated public void setInlineResourceTextBelowSize(int theInlineResourceTextBelowSize) { - myInlineResourceTextBelowSize = theInlineResourceTextBelowSize; + // ignored } /** diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionService.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionService.java index 990ef103309..719d8891b34 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionService.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionService.java @@ -36,7 +36,7 @@ import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; import ca.uhn.fhir.util.ICallable; -import ca.uhn.fhir.util.TestUtil; +import ca.uhn.fhir.util.SleepUtil; import com.google.common.annotations.VisibleForTesting; import jakarta.annotation.Nonnull; import jakarta.annotation.Nullable; @@ -48,6 +48,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.dao.PessimisticLockingFailureException; import org.springframework.orm.ObjectOptimisticLockingFailureException; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.TransactionStatus; @@ -89,11 +90,18 @@ public class HapiTransactionService implements IHapiTransactionService { private Propagation myTransactionPropagationWhenChangingPartitions = Propagation.REQUIRED; + private SleepUtil mySleepUtil = new SleepUtil(); + @VisibleForTesting public void setInterceptorBroadcaster(IInterceptorBroadcaster theInterceptorBroadcaster) { 
myInterceptorBroadcaster = theInterceptorBroadcaster; } + @VisibleForTesting + public void setSleepUtil(SleepUtil theSleepUtil) { + mySleepUtil = theSleepUtil; + } + @Override public IExecutionBuilder withRequest(@Nullable RequestDetails theRequestDetails) { return new ExecutionBuilder(theRequestDetails); @@ -281,6 +289,25 @@ public class HapiTransactionService implements IHapiTransactionService { return doExecuteInTransaction(theExecutionBuilder, theCallback, requestPartitionId, previousRequestPartitionId); } + private boolean isThrowableOrItsSubclassPresent(Throwable theThrowable, Class<? extends Throwable> theClass) { + return ExceptionUtils.indexOfType(theThrowable, theClass) != -1; + } + + private boolean isThrowablePresent(Throwable theThrowable, Class<? extends Throwable> theClass) { + return ExceptionUtils.indexOfThrowable(theThrowable, theClass) != -1; + } + + private boolean isRetriable(Throwable theThrowable) { + return isThrowablePresent(theThrowable, ResourceVersionConflictException.class) + || isThrowablePresent(theThrowable, DataIntegrityViolationException.class) + || isThrowablePresent(theThrowable, ConstraintViolationException.class) + || isThrowablePresent(theThrowable, ObjectOptimisticLockingFailureException.class) + // calling isThrowableOrItsSubclassPresent instead of isThrowablePresent for + // PessimisticLockingFailureException, because we want to retry on its subclasses as well, especially + // CannotAcquireLockException, which is thrown in some deadlock situations which we want to retry + || isThrowableOrItsSubclassPresent(theThrowable, PessimisticLockingFailureException.class); + } + @Nullable private <T> T doExecuteInTransaction( ExecutionBuilder theExecutionBuilder, @@ -294,11 +321,7 @@ return doExecuteCallback(theExecutionBuilder, theCallback); } catch (Exception e) { - if (!(ExceptionUtils.indexOfThrowable(e, ResourceVersionConflictException.class) != -1 - || 
ExceptionUtils.indexOfThrowable(e, DataIntegrityViolationException.class) != -1 - || ExceptionUtils.indexOfThrowable(e, ConstraintViolationException.class) != -1 - || ExceptionUtils.indexOfThrowable(e, ObjectOptimisticLockingFailureException.class) - != -1)) { + if (!isRetriable(e)) { ourLog.debug("Unexpected transaction exception. Will not be retried.", e); throw e; } else { @@ -354,7 +377,7 @@ public class HapiTransactionService implements IHapiTransactionService { } double sleepAmount = (250.0d * i) * Math.random(); long sleepAmountLong = (long) sleepAmount; - TestUtil.sleepAtLeast(sleepAmountLong, false); + mySleepUtil.sleepAtLeast(sleepAmountLong, false); ourLog.info( "About to start a transaction retry due to conflict or constraint error. Sleeping {}ms first.", diff --git a/hapi-fhir-storage/src/test/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionServiceTest.java b/hapi-fhir-storage/src/test/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionServiceTest.java new file mode 100644 index 00000000000..1547cf2114d --- /dev/null +++ b/hapi-fhir-storage/src/test/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionServiceTest.java @@ -0,0 +1,194 @@ +package ca.uhn.fhir.jpa.dao.tx; + +import ca.uhn.fhir.interceptor.api.HookParams; +import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; +import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.jpa.api.model.ResourceVersionConflictResolutionStrategy; +import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; +import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; +import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException; +import ca.uhn.fhir.util.SleepUtil; +import org.hibernate.exception.ConstraintViolationException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import 
org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.dao.CannotAcquireLockException; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.orm.ObjectOptimisticLockingFailureException; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.support.TransactionCallback; + +import java.sql.SQLException; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Stream; + +import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.params.provider.Arguments.arguments; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.isA; +import static org.mockito.Mockito.lenient; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +class HapiTransactionServiceTest { + + @Mock + private IInterceptorBroadcaster myInterceptorBroadcasterMock; + + @Mock + private PlatformTransactionManager myTransactionManagerMock; + + @Mock + private IRequestPartitionHelperSvc myRequestPartitionHelperSvcMock; + @Mock + private PartitionSettings myPartitionSettingsMock; + + @Mock + private SleepUtil mySleepUtilMock; + + private HapiTransactionService myHapiTransactionService; + + + @BeforeEach + public void beforeEach() { + myHapiTransactionService = new HapiTransactionService(); + 
myHapiTransactionService.setTransactionManager(myTransactionManagerMock); + myHapiTransactionService.setInterceptorBroadcaster(myInterceptorBroadcasterMock); + myHapiTransactionService.setPartitionSettingsForUnitTest(myPartitionSettingsMock); + myHapiTransactionService.setRequestPartitionSvcForUnitTest(myRequestPartitionHelperSvcMock); + myHapiTransactionService.setSleepUtil(mySleepUtilMock); + mockInterceptorBroadcaster(); + } + + private void mockInterceptorBroadcaster() { + lenient().when(myInterceptorBroadcasterMock.callHooksAndReturnObject(eq(Pointcut.STORAGE_VERSION_CONFLICT), + isA(HookParams.class))) + .thenAnswer(invocationOnMock -> { + HookParams hookParams = (HookParams) invocationOnMock.getArguments()[1]; + //answer with whatever retry settings passed in as HookParam + RequestDetails requestDetails = hookParams.get(RequestDetails.class); + ResourceVersionConflictResolutionStrategy answer = new ResourceVersionConflictResolutionStrategy(); + answer.setRetry(requestDetails.isRetry()); + answer.setMaxRetries(requestDetails.getMaxRetries()); + return answer; + }); + } + + + /** + * A helper method to test retry logic on exceptions + * TransactionCallback interface allows only throwing RuntimeExceptions, + * that's why the parameter type is RunTimeException + */ + private Exception testRetriesOnException(RuntimeException theException, + boolean theRetryEnabled, + int theMaxRetries, + int theExpectedNumberOfCallsToTransactionCallback) { + RequestDetails requestDetails = new SystemRequestDetails(); + requestDetails.setRetry(theRetryEnabled); + requestDetails.setMaxRetries(theMaxRetries); + + HapiTransactionService.IExecutionBuilder executionBuilder = myHapiTransactionService + .withRequest(requestDetails) + .withTransactionDetails(new TransactionDetails()); + + AtomicInteger numberOfCalls = new AtomicInteger(); + TransactionCallback transactionCallback = (TransactionStatus theStatus) -> { + numberOfCalls.incrementAndGet(); + throw theException; + }; + + 
Exception theExceptionThrownByDoExecute = assertThrows(Exception.class, () -> { + myHapiTransactionService.doExecute((HapiTransactionService.ExecutionBuilder) executionBuilder, transactionCallback); + }); + + assertEquals(theExpectedNumberOfCallsToTransactionCallback, numberOfCalls.get()); + verify(mySleepUtilMock, times(theExpectedNumberOfCallsToTransactionCallback - 1)) + .sleepAtLeast(anyLong(), anyBoolean()); + return theExceptionThrownByDoExecute; + } + + private static Stream provideRetriableExceptionParameters() { + String exceptionMessage = "failed!"; + return Stream.of( + arguments(new ResourceVersionConflictException(exceptionMessage)), + arguments(new DataIntegrityViolationException(exceptionMessage)), + arguments(new ConstraintViolationException(exceptionMessage, new SQLException(""), null)), + arguments(new ObjectOptimisticLockingFailureException(exceptionMessage, new Exception())), + //CannotAcquireLockException is a subclass of + //PessimisticLockingFailureException which we treat as a retriable exception + arguments(new CannotAcquireLockException(exceptionMessage)) + ); + } + + @ParameterizedTest(name = "{index}: {0}") + @MethodSource(value = "provideRetriableExceptionParameters") + void testDoExecute_WhenRetryEnabled_RetriesOnRetriableExceptions(RuntimeException theException) { + testRetriesOnException(theException, true, 2, 3); + } + + + @ParameterizedTest(name = "{index}: {0}") + @MethodSource(value = "provideRetriableExceptionParameters") + void testDoExecute_WhenRetryEnabled_RetriesOnRetriableInnerExceptions(RuntimeException theException) { + //in this test we wrap the retriable exception to test that nested exceptions are covered as well + RuntimeException theWrapperException = new RuntimeException("this is the wrapper", theException); + testRetriesOnException(theWrapperException, true, 2, 3); + } + + @ParameterizedTest(name = "{index}: {0}") + @MethodSource(value = "provideRetriableExceptionParameters") + void 
testDoExecute_WhenRetryIsDisabled_DoesNotRetryExceptions(RuntimeException theException) { + testRetriesOnException(theException, false, 10, 1); + } + + @Test + void testDoExecute_WhenRetryEnabled_DoesNotRetryOnNonRetriableException() { + RuntimeException nonRetriableException = new RuntimeException("should not be retried"); + Exception exceptionThrown = testRetriesOnException(nonRetriableException, true, 10, 1); + assertEquals(nonRetriableException, exceptionThrown); + verifyNoInteractions(myInterceptorBroadcasterMock); + } + + @Test + void testDoExecute_WhenRetyEnabled_StopsRetryingWhenARetryIsSuccessfull() { + RequestDetails requestDetails = new SystemRequestDetails(); + requestDetails.setRetry(true); + requestDetails.setMaxRetries(10); + + HapiTransactionService.IExecutionBuilder executionBuilder = myHapiTransactionService + .withRequest(requestDetails) + .withTransactionDetails(new TransactionDetails()); + + AtomicInteger numberOfCalls = new AtomicInteger(); + TransactionCallback transactionCallback = (TransactionStatus theStatus) -> { + int currentCallNum = numberOfCalls.incrementAndGet(); + //fail for the first two calls then succeed on the third + if (currentCallNum < 3) { + // using ResourceVersionConflictException, since it is a retriable exception + throw new ResourceVersionConflictException("failed"); + } + return null; + }; + + myHapiTransactionService.doExecute((HapiTransactionService.ExecutionBuilder) executionBuilder, transactionCallback); + + assertEquals(3, numberOfCalls.get()); + verify(mySleepUtilMock, times(2)) + .sleepAtLeast(anyLong(), anyBoolean()); + } +} diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/jpa/JpaModelScannerAndVerifier.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/jpa/JpaModelScannerAndVerifier.java index 47810c38c1a..94d4fd93943 100644 --- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/jpa/JpaModelScannerAndVerifier.java +++ 
b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/test/utilities/jpa/JpaModelScannerAndVerifier.java @@ -361,9 +361,6 @@ public class JpaModelScannerAndVerifier { if (!theIsView && column.length() == 255) { throw new IllegalStateException(Msg.code(1626) + "Field does not have an explicit maximum length specified: " + field); } - if (column.length() > MAX_COL_LENGTH) { - throw new IllegalStateException(Msg.code(1627) + "Field is too long: " + field); - } } Size size = theAnnotatedElement.getAnnotation(Size.class); diff --git a/hapi-tinder-plugin/pom.xml b/hapi-tinder-plugin/pom.xml index 7346ec210df..310dbbcd3b4 100644 --- a/hapi-tinder-plugin/pom.xml +++ b/hapi-tinder-plugin/pom.xml @@ -184,14 +184,6 @@ org.apache.maven maven-plugin-api -+ org.apache.maven.plugin-tools diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ddl/DdlGeneratorHibernate61.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ddl/DdlGeneratorHibernate61.java index f304d194fc9..a60af7c015f 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ddl/DdlGeneratorHibernate61.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ddl/DdlGeneratorHibernate61.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.tinder.ddl; import ca.uhn.fhir.jpa.util.ISequenceValueMassager; +import ca.uhn.fhir.util.IoUtil; import jakarta.annotation.Nonnull; import jakarta.persistence.Entity; import jakarta.persistence.MappedSuperclass; @@ -29,6 +30,7 @@ import org.springframework.core.io.ResourceLoader; import org.springframework.core.io.support.PathMatchingResourcePatternResolver; import org.springframework.core.type.filter.AnnotationTypeFilter; +import java.io.Closeable; import java.io.File; import java.io.FileWriter; import java.io.IOException; @@ -125,19 +127,8 @@ public class DdlGeneratorHibernate61 { writeContentsToFile(nextDialect.getAppendFile(), classLoader, outputFile); } - } - private static void writeContentsToFile(String prependFile, ClassLoader classLoader, File outputFile) 
- throws MojoFailureException { - if (isNotBlank(prependFile)) { - ResourceLoader loader = new DefaultResourceLoader(classLoader); - Resource resource = loader.getResource(prependFile); - try (Writer w = new FileWriter(outputFile, true)) { - w.append(resource.getContentAsString(StandardCharsets.UTF_8)); - } catch (IOException e) { - throw new MojoFailureException("Failed to write to file " + outputFile + ": " + e.getMessage(), e); - } - } + IoUtil.closeQuietly(connectionProvider); } public void setProject(MavenProject theProject) { @@ -204,18 +195,64 @@ public class DdlGeneratorHibernate61 { * here. The schema export doesn't actually touch this DB, so it doesn't * matter that it doesn't correlate to the specified dialect. */ - private static class FakeConnectionConnectionProvider extends UserSuppliedConnectionProviderImpl { + private static class FakeConnectionConnectionProvider extends UserSuppliedConnectionProviderImpl + implements Closeable { private static final long serialVersionUID = 4147495169899817244L; + private Connection connection; - @Override - public Connection getConnection() throws SQLException { - ourLog.trace("Using internal driver: {}", org.h2.Driver.class); - return DriverManager.getConnection("jdbc:h2:mem:tmp", "sa", "sa"); + public FakeConnectionConnectionProvider() { + try { + connection = DriverManager.getConnection("jdbc:h2:mem:tmp", "sa", "sa"); + } catch (SQLException e) { + connection = null; + return; + } + + /* + * The Oracle Dialect tries to query for any existing sequences, so we need to supply + * a fake empty table to answer that query. 
+ */ + try { + connection.setAutoCommit(true); + connection + .prepareStatement("create table all_sequences (PID bigint not null, primary key (PID))") + .execute(); + } catch (SQLException e) { + ourLog.error("Failed to create sequences table", e); + } } @Override - public void closeConnection(Connection conn) throws SQLException { - conn.close(); + public Connection getConnection() { + ourLog.trace("Using internal driver: {}", org.h2.Driver.class); + return connection; + } + + @Override + public void closeConnection(Connection conn) { + // ignore + } + + @Override + public void close() throws IOException { + try { + connection.close(); + } catch (SQLException e) { + throw new IOException(e); + } + } + } + + private static void writeContentsToFile(String prependFile, ClassLoader classLoader, File outputFile) + throws MojoFailureException { + if (isNotBlank(prependFile)) { + ResourceLoader loader = new DefaultResourceLoader(classLoader); + Resource resource = loader.getResource(prependFile); + try (Writer w = new FileWriter(outputFile, true)) { + w.append(resource.getContentAsString(StandardCharsets.UTF_8)); + } catch (IOException e) { + throw new MojoFailureException("Failed to write to file " + outputFile + ": " + e.getMessage(), e); + } } } } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ddl/GenerateDdlMojo.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ddl/GenerateDdlMojo.java index 9d3dc65b5fa..a8739463519 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ddl/GenerateDdlMojo.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ddl/GenerateDdlMojo.java @@ -28,13 +28,13 @@ public class GenerateDdlMojo extends AbstractMojo { private static final Logger ourLog = LoggerFactory.getLogger(GenerateDdlMojo.class); @Parameter - private ListpackageNames; + List packageNames; @Parameter - private List dialects; + List dialects; @Parameter - private String outputDirectory; + String outputDirectory; 
@Parameter(defaultValue = "${project}", readonly = true) private transient MavenProject project; @@ -70,18 +70,20 @@ public class GenerateDdlMojo extends AbstractMojo { public static void main(String[] args) throws MojoExecutionException, MojoFailureException { /* - * Note, to execute this, add the following snippet to this module's POM. The whole project won't work with + * Note, to execute this for real entities, add the following snippet to this module's POM. The whole project won't work with * that added, but you can add it temporarily in order to debug this in IJ: * * + * + * Alternately, there is a unit test with fake entities that also runs this class. */ GenerateDdlMojo m = new GenerateDdlMojo(); m.packageNames = List.of("ca.uhn.fhir.jpa.model.entity"); m.outputDirectory = "hapi-tinder-plugin/target"; - m.dialects = List.of(new Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect", "h2.sql")); + m.dialects = List.of(new Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect", "postgres.sql")); m.execute(); } diff --git a/hapi-tinder-plugin/src/test/java/ca/uhn/fhir/tinder/ddl/GenerateDdlMojoTest.java b/hapi-tinder-plugin/src/test/java/ca/uhn/fhir/tinder/ddl/GenerateDdlMojoTest.java new file mode 100644 index 00000000000..88af72a4b9e --- /dev/null +++ b/hapi-tinder-plugin/src/test/java/ca/uhn/fhir/tinder/ddl/GenerateDdlMojoTest.java @@ -0,0 +1,46 @@ +package ca.uhn.fhir.tinder.ddl; + +import org.apache.commons.io.FileUtils; +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugin.MojoFailureException; +import org.junit.jupiter.api.Test; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Locale; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.MatcherAssert.assertThat; + +class GenerateDdlMojoTest { + + @Test + public void testGenerateSequences() throws MojoExecutionException, 
MojoFailureException, IOException { + + GenerateDdlMojo m = new GenerateDdlMojo(); + m.packageNames = List.of("ca.uhn.fhir.tinder.ddl.test"); + m.outputDirectory = "target/generate-ddl-plugin-test/"; + m.dialects = List.of( + new GenerateDdlMojo.Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect", "h2.sql"), + new GenerateDdlMojo.Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect", "postgres.sql"), + new GenerateDdlMojo.Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirOracleDialect", "oracle.sql"), + new GenerateDdlMojo.Dialect("ca.uhn.fhir.jpa.model.dialect.HapiFhirSQLServerDialect", "sqlserver.sql") + ); + m.execute(); + + verifySequence("sqlserver.sql"); + verifySequence("oracle.sql"); + verifySequence("postgres.sql"); + verifySequence("h2.sql"); + + } + + private static void verifySequence(String fileName) throws IOException { + String contents = FileUtils.readFileToString(new File("target/generate-ddl-plugin-test/" + fileName), StandardCharsets.UTF_8).toUpperCase(Locale.ROOT); + assertThat(fileName, contents, containsString("CREATE SEQUENCE")); + } + + +} diff --git a/hapi-tinder-plugin/src/test/java/ca/uhn/fhir/tinder/ddl/test/ExampleEntity.java b/hapi-tinder-plugin/src/test/java/ca/uhn/fhir/tinder/ddl/test/ExampleEntity.java new file mode 100644 index 00000000000..9c400c45a90 --- /dev/null +++ b/hapi-tinder-plugin/src/test/java/ca/uhn/fhir/tinder/ddl/test/ExampleEntity.java @@ -0,0 +1,25 @@ +package ca.uhn.fhir.tinder.ddl.test; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.SequenceGenerator; +import jakarta.persistence.Table; + +@Table() +@Entity() +public class ExampleEntity { + + @Id + @SequenceGenerator(name = "SEQ_RESOURCE_HISTORY_ID", sequenceName = "SEQ_RESOURCE_HISTORY_ID") + @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_RESOURCE_HISTORY_ID") + 
@Column(name = "PID") + private Long myId; + + @Column(name = "RES_ID", nullable = false, updatable = false, insertable = false) + private Long myResourceId; + + +} diff --git a/pom.xml b/pom.xml index 9c84fe29e1c..8c52e0c4f79 100644 --- a/pom.xml +++ b/pom.xml @@ -2188,7 +2188,21 @@ca.uhn.hapi.fhir *hapi-fhir-jpaserver-model *${project.version} *org.testcontainers testcontainers ${testcontainers_version} -test ++ +org.testcontainers +postgresql +${testcontainers_version} ++ +org.testcontainers +mssqlserver +${testcontainers_version} ++ org.testcontainers +oracle-xe +${testcontainers_version} org.testcontainers @@ -2343,7 +2357,7 @@2000m -XDcompilePolicy=simple --Xplugin:ErrorProne -Xep:MissingSummary:OFF +-Xplugin:ErrorProne -Xep:MissingSummary:OFF -XepExcludedPaths:.*/src/test/java/.* -J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED -J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED -J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED