From ec0021cd409cd821826a6e113684c4ab35a753a2 Mon Sep 17 00:00:00 2001
From: James Agnew
Date: Tue, 18 Jun 2024 10:25:07 -0400
Subject: [PATCH 01/19] Add error checker for duplicate codesystem codes
(#6014)
* Add error checker
* Add changelog
* Address review comment
---
...6014-add-codesystem-dupe-code-checker.yaml | 7 ++++
.../term/TermCodeSystemStorageSvcImpl.java | 28 +++++++++++++
.../r4/FhirResourceDaoR4CodeSystemTest.java | 42 +++++++++++++++++++
3 files changed, 77 insertions(+)
create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6014-add-codesystem-dupe-code-checker.yaml
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6014-add-codesystem-dupe-code-checker.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6014-add-codesystem-dupe-code-checker.yaml
new file mode 100644
index 00000000000..e951d583b90
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6014-add-codesystem-dupe-code-checker.yaml
@@ -0,0 +1,7 @@
+---
+type: add
+issue: 6014
+title: "When uploading an invalid CodeSystem to the JPA server containing
+ duplicate codes, the server responded with an unhelpful error message
+ referring to a database constraint error. This has been fixed so that
+ a more informative error message is returned."
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java
index 15998b0e8e6..af01a692a32 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java
@@ -50,8 +50,10 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.ObjectUtil;
+import ca.uhn.fhir.util.UrlUtil;
import ca.uhn.fhir.util.ValidateUtil;
import jakarta.annotation.Nonnull;
import jakarta.persistence.EntityManager;
@@ -294,6 +296,8 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
theResourceEntity.getIdDt().getValue(),
theCodeSystem.getContentElement().getValueAsString());
+ detectDuplicatesInCodeSystem(theCodeSystem);
+
Long pid = (Long) theCodeSystem.getUserData(RESOURCE_PID_KEY);
assert pid != null;
JpaPid codeSystemResourcePid = JpaPid.fromId(pid);
@@ -339,6 +343,30 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
}
}
+ private static void detectDuplicatesInCodeSystem(CodeSystem theCodeSystem) {
+ detectDuplicatesInCodeSystem(theCodeSystem.getConcept(), new HashSet<>());
+ }
+
+ private static void detectDuplicatesInCodeSystem(
+			List<CodeSystem.ConceptDefinitionComponent> theCodeList, Set<String> theFoundCodesBuffer) {
+ for (var next : theCodeList) {
+ if (isNotBlank(next.getCode())) {
+ if (!theFoundCodesBuffer.add(next.getCode())) {
+ /*
+ * Note: We could possibly modify this behaviour to be forgiving, and just
+ * ignore duplicates. The only issue is that concepts can have properties,
+ * designations, etc. and it could be dangerous to just pick one and ignore the
+ * other. So the safer thing seems to be to just throw an error.
+ */
+ throw new PreconditionFailedException(Msg.code(2528) + "Duplicate concept detected in CodeSystem: "
+ + UrlUtil.sanitizeUrlPart(next.getCode()));
+ }
+ }
+ // Test child concepts within the parent concept
+ detectDuplicatesInCodeSystem(next.getConcept(), theFoundCodesBuffer);
+ }
+ }
+
@Override
@Transactional
public IIdType storeNewCodeSystemVersion(
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CodeSystemTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CodeSystemTest.java
index ae5650ba3be..b2b3ba4f4a3 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CodeSystemTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CodeSystemTest.java
@@ -4,6 +4,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.test.Batch2JobHelper;
+import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import org.apache.commons.io.IOUtils;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.CodeSystem;
@@ -20,6 +21,7 @@ import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.fail;
public class FhirResourceDaoR4CodeSystemTest extends BaseJpaR4Test {
@@ -191,6 +193,46 @@ public class FhirResourceDaoR4CodeSystemTest extends BaseJpaR4Test {
return id;
}
+ @Test
+ public void testCodeSystemWithDuplicateCode() {
+ CodeSystem cs = new CodeSystem();
+ cs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);
+ cs.setUrl("http://foo");
+ cs.setVersion("1.0");
+ cs.addConcept().setCode("CODE0").setDisplay("Code0");
+ cs.addConcept().setCode("CODE1").setDisplay("Code1");
+ cs.addConcept().setCode("CODE1").setDisplay("Code1");
+ cs.addConcept().setCode("CODE2").setDisplay("Code2");
+
+ try {
+ myCodeSystemDao.create(cs, mySrd);
+ fail();
+ } catch (PreconditionFailedException e) {
+ assertThat(e.getMessage()).contains("Duplicate concept detected in CodeSystem: CODE1");
+ }
+ }
+
+ @Test
+ public void testCodeSystemWithDuplicateCodeInChild() {
+ CodeSystem cs = new CodeSystem();
+ cs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);
+ cs.setUrl("http://foo");
+ cs.setVersion("1.0");
+
+ CodeSystem.ConceptDefinitionComponent parent = cs.addConcept().setCode("CODE0").setDisplay("Code0");
+ parent.addConcept().setCode("CODE1").setDisplay("Code1");
+ parent.addConcept().setCode("CODE1").setDisplay("Code1");
+ cs.addConcept().setCode("CODE2").setDisplay("Code2");
+
+ try {
+ myCodeSystemDao.create(cs, mySrd);
+ fail();
+ } catch (PreconditionFailedException e) {
+ assertThat(e.getMessage()).contains("Duplicate concept detected in CodeSystem: CODE1");
+ }
+ }
+
+
@AfterAll
public static void afterClassClearContext() {
TermReindexingSvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
From 47d6e357b63c5e969ac7ef4efc862b221e33f13c Mon Sep 17 00:00:00 2001
From: James Agnew
Date: Tue, 18 Jun 2024 13:05:29 -0400
Subject: [PATCH 02/19] Fix JPA nullable primitive columns (#5926)
* Fix JPA nullable primitive columns
* Add changelog
* Spotless
* License headers
* Migration fix
* Spotless
* Cleanup
* Cleanup
* Add task skipping
* Update docs
* CLeanup
* Spotless
* Address review comments
* Test fix
* HAPI FHIR version bump
---
hapi-deployable-pom/pom.xml | 2 +-
hapi-fhir-android/pom.xml | 2 +-
hapi-fhir-base/pom.xml | 2 +-
hapi-fhir-bom/pom.xml | 4 +-
hapi-fhir-checkstyle/pom.xml | 2 +-
hapi-fhir-cli/hapi-fhir-cli-api/pom.xml | 2 +-
.../cli/BaseFlywayMigrateDatabaseCommand.java | 14 +-
hapi-fhir-cli/hapi-fhir-cli-app/pom.xml | 2 +-
hapi-fhir-cli/pom.xml | 2 +-
hapi-fhir-client-okhttp/pom.xml | 2 +-
hapi-fhir-client/pom.xml | 2 +-
hapi-fhir-converter/pom.xml | 2 +-
hapi-fhir-dist/pom.xml | 2 +-
hapi-fhir-docs/pom.xml | 2 +-
...26-fix-jpa-nullable-primitive-columns.yaml | 4 +
...5926-skip-migrators-when-initializing.yaml | 7 +
hapi-fhir-jacoco/pom.xml | 2 +-
hapi-fhir-jaxrsserver-base/pom.xml | 2 +-
hapi-fhir-jpa/pom.xml | 2 +-
hapi-fhir-jpaserver-base/pom.xml | 2 +-
.../jpa/entity/Batch2JobInstanceEntity.java | 4 +-
.../jpa/entity/Batch2WorkChunkEntity.java | 10 +-
.../jpa/entity/HapiFhirEnversRevision.java | 2 +-
.../ca/uhn/fhir/jpa/entity/TermConcept.java | 3 +-
.../tasks/HapiFhirJpaMigrationTasks.java | 273 +++++++++++++-----
.../reindex/InstanceReindexServiceImpl.java | 8 +-
.../pom.xml | 2 +-
hapi-fhir-jpaserver-hfql/pom.xml | 2 +-
hapi-fhir-jpaserver-ips/pom.xml | 2 +-
hapi-fhir-jpaserver-mdm/pom.xml | 2 +-
hapi-fhir-jpaserver-model/pom.xml | 2 +-
.../jpa/model/entity/BinaryStorageEntity.java | 2 +-
.../model/entity/ResourceHistoryTable.java | 4 +-
.../ResourceIndexedComboStringUnique.java | 4 +-
.../ResourceIndexedSearchParamCoords.java | 17 +-
.../fhir/jpa/model/entity/ResourceTable.java | 20 +-
hapi-fhir-jpaserver-searchparam/pom.xml | 2 +-
hapi-fhir-jpaserver-subscription/pom.xml | 2 +-
hapi-fhir-jpaserver-test-dstu2/pom.xml | 2 +-
hapi-fhir-jpaserver-test-dstu3/pom.xml | 2 +-
hapi-fhir-jpaserver-test-r4/pom.xml | 2 +-
hapi-fhir-jpaserver-test-r4b/pom.xml | 2 +-
hapi-fhir-jpaserver-test-r5/pom.xml | 2 +-
hapi-fhir-jpaserver-test-utilities/pom.xml | 2 +-
hapi-fhir-jpaserver-uhnfhirtest/pom.xml | 2 +-
hapi-fhir-server-cds-hooks/pom.xml | 2 +-
hapi-fhir-server-mdm/pom.xml | 2 +-
hapi-fhir-server-openapi/pom.xml | 2 +-
hapi-fhir-server/pom.xml | 2 +-
.../hapi-fhir-caching-api/pom.xml | 2 +-
.../hapi-fhir-caching-caffeine/pom.xml | 4 +-
.../hapi-fhir-caching-guava/pom.xml | 2 +-
.../hapi-fhir-caching-testing/pom.xml | 2 +-
hapi-fhir-serviceloaders/pom.xml | 2 +-
.../pom.xml | 2 +-
.../pom.xml | 2 +-
.../pom.xml | 2 +-
.../pom.xml | 2 +-
.../hapi-fhir-spring-boot-samples/pom.xml | 2 +-
.../hapi-fhir-spring-boot-starter/pom.xml | 2 +-
hapi-fhir-spring-boot/pom.xml | 2 +-
hapi-fhir-sql-migrate/pom.xml | 2 +-
.../ca/uhn/fhir/jpa/migrate/HapiMigrator.java | 47 ++-
.../fhir/jpa/migrate/MigrationTaskList.java | 9 +
.../jpa/migrate/MigrationTaskSkipper.java | 3 +-
.../jpa/migrate/taskdef/AddColumnTask.java | 7 +-
.../migrate/taskdef/AddTableByColumnTask.java | 2 +-
.../jpa/migrate/taskdef/ArbitrarySqlTask.java | 3 +
.../fhir/jpa/migrate/taskdef/BaseTask.java | 140 ++++-----
.../taskdef/ForceIdMigrationFixTask.java | 7 +
.../migrate/taskdef/InitializeSchemaTask.java | 11 +-
.../jpa/migrate/taskdef/ModifyColumnTask.java | 5 +-
.../jpa/migrate/taskdef/RenameTableTask.java | 1 +
.../tasks/SchemaInitializationProvider.java | 9 +-
.../fhir/jpa/migrate/tasks/api/Builder.java | 80 +++--
.../jpa/migrate/tasks/api/TaskFlagEnum.java | 65 +++++
.../migrate/HapiMigrationStorageSvcTest.java | 2 +-
.../uhn/fhir/jpa/migrate/HapiMigratorIT.java | 110 ++++++-
.../taskdef/AddTableByColumnTaskTest.java | 2 +-
.../jpa/migrate/taskdef/ModifyColumnTest.java | 17 +-
.../hapi-migrator-it-init-schema/h2.sql | 6 +
hapi-fhir-storage-batch2-jobs/pom.xml | 2 +-
.../pom.xml | 2 +-
hapi-fhir-storage-batch2/pom.xml | 2 +-
hapi-fhir-storage-cr/pom.xml | 2 +-
hapi-fhir-storage-mdm/pom.xml | 2 +-
hapi-fhir-storage-test-utilities/pom.xml | 2 +-
hapi-fhir-storage/pom.xml | 2 +-
hapi-fhir-structures-dstu2.1/pom.xml | 2 +-
hapi-fhir-structures-dstu2/pom.xml | 2 +-
hapi-fhir-structures-dstu3/pom.xml | 2 +-
hapi-fhir-structures-hl7org-dstu2/pom.xml | 2 +-
hapi-fhir-structures-r4/pom.xml | 2 +-
hapi-fhir-structures-r4b/pom.xml | 2 +-
hapi-fhir-structures-r5/pom.xml | 2 +-
hapi-fhir-test-utilities/pom.xml | 2 +-
.../jpa/JpaModelScannerAndVerifier.java | 21 +-
hapi-fhir-testpage-overlay/pom.xml | 2 +-
.../pom.xml | 2 +-
hapi-fhir-validation-resources-dstu2/pom.xml | 2 +-
hapi-fhir-validation-resources-dstu3/pom.xml | 2 +-
hapi-fhir-validation-resources-r4/pom.xml | 2 +-
hapi-fhir-validation-resources-r4b/pom.xml | 2 +-
hapi-fhir-validation-resources-r5/pom.xml | 2 +-
hapi-fhir-validation/pom.xml | 2 +-
hapi-tinder-plugin/pom.xml | 2 +-
hapi-tinder-test/pom.xml | 2 +-
pom.xml | 2 +-
.../pom.xml | 2 +-
.../pom.xml | 2 +-
.../pom.xml | 2 +-
111 files changed, 723 insertions(+), 354 deletions(-)
create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5926-fix-jpa-nullable-primitive-columns.yaml
create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5926-skip-migrators-when-initializing.yaml
create mode 100644 hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/TaskFlagEnum.java
create mode 100644 hapi-fhir-sql-migrate/src/test/resources/hapi-migrator-it-init-schema/h2.sql
diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml
index 30e1ccb5d6d..259a53de8ae 100644
--- a/hapi-deployable-pom/pom.xml
+++ b/hapi-deployable-pom/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-fhir
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../pom.xml
diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml
index 9060665d1b3..2319bf9a4cc 100644
--- a/hapi-fhir-android/pom.xml
+++ b/hapi-fhir-android/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml
index fcc60e05e2b..03e4d113f4c 100644
--- a/hapi-fhir-base/pom.xml
+++ b/hapi-fhir-base/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml
index 3a00980cded..1dec9586365 100644
--- a/hapi-fhir-bom/pom.xml
+++ b/hapi-fhir-bom/pom.xml
@@ -4,7 +4,7 @@
4.0.0ca.uhn.hapi.fhirhapi-fhir-bom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOTpomHAPI FHIR BOM
@@ -12,7 +12,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-checkstyle/pom.xml b/hapi-fhir-checkstyle/pom.xml
index 2a21a1796fe..4bdbe08114a 100644
--- a/hapi-fhir-checkstyle/pom.xml
+++ b/hapi-fhir-checkstyle/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-fhir
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../pom.xml
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
index 3555f67f63c..e53b6e39633 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseFlywayMigrateDatabaseCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseFlywayMigrateDatabaseCommand.java
index 5d049b79246..dbc5be5ad41 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseFlywayMigrateDatabaseCommand.java
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseFlywayMigrateDatabaseCommand.java
@@ -26,8 +26,6 @@ import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.List;
@@ -40,12 +38,12 @@ import static org.apache.commons.lang3.StringUtils.defaultString;
* NB since 2019-12-05: This class is kind of weirdly named now, since it can either use Flyway or not use Flyway
*/
public abstract class BaseFlywayMigrateDatabaseCommand extends BaseCommand {
- private static final Logger ourLog = LoggerFactory.getLogger(BaseFlywayMigrateDatabaseCommand.class);
public static final String MIGRATE_DATABASE = "migrate-database";
public static final String NO_COLUMN_SHRINK = "no-column-shrink";
- public static final String STRICT_ORDER = "strict-order";
public static final String SKIP_VERSIONS = "skip-versions";
+ public static final String ENABLE_HEAVYWEIGHT_MIGRATIONS = "enable-heavyweight-migrations";
+
+	private Set<String> myFlags;
private String myMigrationTableName;
@@ -100,6 +98,12 @@ public abstract class BaseFlywayMigrateDatabaseCommand extends B
SKIP_VERSIONS,
"Versions",
"A comma separated list of schema versions to skip. E.g. 4_1_0.20191214.2,4_1_0.20191214.4");
+ addOptionalOption(
+ retVal,
+ null,
+ ENABLE_HEAVYWEIGHT_MIGRATIONS,
+ false,
+ "If this flag is set, additional migration tasks will be executed that are considered unnecessary to execute on a database with a significant amount of data loaded. This option is not generally necessary.");
return retVal;
}
@@ -125,6 +129,7 @@ public abstract class BaseFlywayMigrateDatabaseCommand extends B
boolean dryRun = theCommandLine.hasOption("r");
boolean noColumnShrink = theCommandLine.hasOption(BaseFlywayMigrateDatabaseCommand.NO_COLUMN_SHRINK);
+ boolean runHeavyweight = theCommandLine.hasOption(ENABLE_HEAVYWEIGHT_MIGRATIONS);
String flags = theCommandLine.getOptionValue("x");
myFlags = Arrays.stream(defaultString(flags).split(","))
@@ -139,6 +144,7 @@ public abstract class BaseFlywayMigrateDatabaseCommand extends B
migrator.createMigrationTableIfRequired();
migrator.setDryRun(dryRun);
+ migrator.setRunHeavyweightSkippableTasks(runHeavyweight);
migrator.setNoColumnShrink(noColumnShrink);
String skipVersions = theCommandLine.getOptionValue(BaseFlywayMigrateDatabaseCommand.SKIP_VERSIONS);
addTasks(migrator, skipVersions);
diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
index b6c73678960..45ecdebddca 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
+++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhirhapi-fhir-cli
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../pom.xml
diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml
index 6745d62518b..2eb4c555392 100644
--- a/hapi-fhir-cli/pom.xml
+++ b/hapi-fhir-cli/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-fhir
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../pom.xml
diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml
index 9a7ac810865..b2fa315c0ee 100644
--- a/hapi-fhir-client-okhttp/pom.xml
+++ b/hapi-fhir-client-okhttp/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml
index de0d7f14afe..b5667ad3e0c 100644
--- a/hapi-fhir-client/pom.xml
+++ b/hapi-fhir-client/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml
index 9883e898092..3cfc40c6258 100644
--- a/hapi-fhir-converter/pom.xml
+++ b/hapi-fhir-converter/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml
index 9e001be27ab..f008db1fd69 100644
--- a/hapi-fhir-dist/pom.xml
+++ b/hapi-fhir-dist/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-fhir
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../pom.xml
diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml
index ba6c6912dbc..ab973c0f6b8 100644
--- a/hapi-fhir-docs/pom.xml
+++ b/hapi-fhir-docs/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5926-fix-jpa-nullable-primitive-columns.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5926-fix-jpa-nullable-primitive-columns.yaml
new file mode 100644
index 00000000000..a444a25f548
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5926-fix-jpa-nullable-primitive-columns.yaml
@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 5926
+title: "A number of columns in the JPA schema use primitive types (and therefore can never have a null value) but aren't marked as non-null."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5926-skip-migrators-when-initializing.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5926-skip-migrators-when-initializing.yaml
new file mode 100644
index 00000000000..cd3506ee7af
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5926-skip-migrators-when-initializing.yaml
@@ -0,0 +1,7 @@
+---
+type: fix
+issue: 5926
+title: "A regression in HAPI FHIR 6.4.0 meant that the JPA server schema migrator ran all tasks
+ even when the database was initially empty and the schema was being initialized by script.
+ This did not produce any incorrect results, but did impact the amount of time taken to initialize
+ an empty database. This has been corrected."
diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml
index 7d35c5c03fa..7cc8d05502a 100644
--- a/hapi-fhir-jacoco/pom.xml
+++ b/hapi-fhir-jacoco/pom.xml
@@ -11,7 +11,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml
index aa02e821f97..300c0cb720b 100644
--- a/hapi-fhir-jaxrsserver-base/pom.xml
+++ b/hapi-fhir-jaxrsserver-base/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpa/pom.xml b/hapi-fhir-jpa/pom.xml
index 145cdb4224f..5885d7a365e 100644
--- a/hapi-fhir-jpa/pom.xml
+++ b/hapi-fhir-jpa/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml
index d628e03ee5c..230c0a53a73 100644
--- a/hapi-fhir-jpaserver-base/pom.xml
+++ b/hapi-fhir-jpaserver-base/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2JobInstanceEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2JobInstanceEntity.java
index bcf0a6cc0d9..3da31581d26 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2JobInstanceEntity.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2JobInstanceEntity.java
@@ -119,13 +119,13 @@ public class Batch2JobInstanceEntity implements Serializable {
@Column(name = "WORK_CHUNKS_PURGED", nullable = false)
private boolean myWorkChunksPurged;
- @Column(name = "PROGRESS_PCT")
+ @Column(name = "PROGRESS_PCT", nullable = false)
private double myProgress;
@Column(name = "ERROR_MSG", length = ERROR_MSG_MAX_LENGTH, nullable = true)
private String myErrorMessage;
- @Column(name = "ERROR_COUNT")
+ @Column(name = "ERROR_COUNT", nullable = false)
private int myErrorCount;
@Column(name = "EST_REMAINING", length = TIME_REMAINING_LENGTH, nullable = true)
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2WorkChunkEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2WorkChunkEntity.java
index db60706f476..26bccd58bf9 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2WorkChunkEntity.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2WorkChunkEntity.java
@@ -138,6 +138,10 @@ public class Batch2WorkChunkEntity implements Serializable {
/**
* The number of times the work chunk has had its state set back to POLL_WAITING.
+ *
+ * TODO: Note that this column was added in 7.2.0, so it is nullable in order to
+ * account for existing rows that were added before the column was added. In
+ * the future we should make this non-null.
*/
@Column(name = "POLL_ATTEMPTS", nullable = true)
private Integer myPollAttempts;
@@ -145,7 +149,9 @@ public class Batch2WorkChunkEntity implements Serializable {
/**
* Default constructor for Hibernate.
*/
- public Batch2WorkChunkEntity() {}
+ public Batch2WorkChunkEntity() {
+ myPollAttempts = 0;
+ }
/**
* Projection constructor for no-data path.
@@ -184,7 +190,7 @@ public class Batch2WorkChunkEntity implements Serializable {
myRecordsProcessed = theRecordsProcessed;
myWarningMessage = theWarningMessage;
myNextPollTime = theNextPollTime;
- myPollAttempts = thePollAttempts;
+ myPollAttempts = thePollAttempts != null ? thePollAttempts : 0;
}
public static Batch2WorkChunkEntity fromWorkChunk(WorkChunk theWorkChunk) {
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/HapiFhirEnversRevision.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/HapiFhirEnversRevision.java
index 3a930be0e27..68c7ac12f7e 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/HapiFhirEnversRevision.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/HapiFhirEnversRevision.java
@@ -58,7 +58,7 @@ public class HapiFhirEnversRevision implements Serializable {
@SequenceGenerator(name = "SEQ_HFJ_REVINFO", sequenceName = "SEQ_HFJ_REVINFO")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_HFJ_REVINFO")
@RevisionNumber
- @Column(name = "REV")
+ @Column(name = "REV", nullable = false)
private long myRev;
@RevisionTimestamp
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java
index d238278bcfe..5f056979fea 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java
@@ -117,11 +117,12 @@ public class TermConcept implements Serializable {
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(
name = "CODESYSTEM_PID",
+ nullable = false,
referencedColumnName = "PID",
foreignKey = @ForeignKey(name = "FK_CONCEPT_PID_CS_PID"))
private TermCodeSystemVersion myCodeSystem;
- @Column(name = "CODESYSTEM_PID", insertable = false, updatable = false)
+ @Column(name = "CODESYSTEM_PID", insertable = false, updatable = false, nullable = false)
@GenericField(name = "myCodeSystemVersionPid")
private long myCodeSystemVersionPid;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
index 0db112c60fd..7bcebb19fda 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
@@ -25,7 +25,6 @@ import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.ArbitrarySqlTask;
-import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.CalculateHashesTask;
import ca.uhn.fhir.jpa.migrate.taskdef.CalculateOrdinalDatesTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ColumnTypeEnum;
@@ -33,6 +32,7 @@ import ca.uhn.fhir.jpa.migrate.taskdef.ForceIdMigrationCopyTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ForceIdMigrationFixTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
+import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
@@ -135,6 +135,121 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
.toColumn("RES_ID")
.references("HFJ_RESOURCE", "RES_ID");
}
+
+ /*
+ * Make a bunch of columns non-nullable. Note that we won't actually apply this migration
+ * on the live system as it would take a loooooong time to execute these on heavily loaded
+ * databases.
+ */
+ // Skipping numbers 20240601.1 and 20240601.2 as they were found to not
+ // be needed during code review.
+ version.onTable("HFJ_RESOURCE")
+ .modifyColumn("20240601.3", "SP_HAS_LINKS")
+ .nonNullable()
+ .withType(ColumnTypeEnum.BOOLEAN)
+ .heavyweightSkipByDefault()
+ .failureAllowed();
+ version.onTable("HFJ_RESOURCE")
+ .modifyColumn("20240601.4", "SP_COORDS_PRESENT")
+ .nonNullable()
+ .withType(ColumnTypeEnum.BOOLEAN)
+ .heavyweightSkipByDefault()
+ .failureAllowed();
+ version.onTable("HFJ_RESOURCE")
+ .modifyColumn("20240601.5", "SP_DATE_PRESENT")
+ .nonNullable()
+ .withType(ColumnTypeEnum.BOOLEAN)
+ .heavyweightSkipByDefault()
+ .failureAllowed();
+ version.onTable("HFJ_RESOURCE")
+ .modifyColumn("20240601.6", "SP_NUMBER_PRESENT")
+ .nonNullable()
+ .withType(ColumnTypeEnum.BOOLEAN)
+ .heavyweightSkipByDefault()
+ .failureAllowed();
+ version.onTable("HFJ_RESOURCE")
+ .modifyColumn("20240601.7", "SP_QUANTITY_PRESENT")
+ .nonNullable()
+ .withType(ColumnTypeEnum.BOOLEAN)
+ .heavyweightSkipByDefault()
+ .failureAllowed();
+ version.onTable("HFJ_RESOURCE")
+ .modifyColumn("20240601.8", "SP_QUANTITY_NRML_PRESENT")
+ .nonNullable()
+ .withType(ColumnTypeEnum.BOOLEAN)
+ .heavyweightSkipByDefault()
+ .failureAllowed();
+ version.onTable("HFJ_RESOURCE")
+ .modifyColumn("20240601.9", "SP_STRING_PRESENT")
+ .nonNullable()
+ .withType(ColumnTypeEnum.BOOLEAN)
+ .heavyweightSkipByDefault()
+ .failureAllowed();
+ version.onTable("HFJ_RESOURCE")
+ .modifyColumn("20240601.10", "SP_TOKEN_PRESENT")
+ .nonNullable()
+ .withType(ColumnTypeEnum.BOOLEAN)
+ .heavyweightSkipByDefault()
+ .failureAllowed();
+ version.onTable("HFJ_RESOURCE")
+ .modifyColumn("20240601.11", "SP_URI_PRESENT")
+ .nonNullable()
+ .withType(ColumnTypeEnum.BOOLEAN)
+ .heavyweightSkipByDefault()
+ .failureAllowed();
+ version.onTable("HFJ_RESOURCE")
+ .modifyColumn("20240601.12", "RES_VER")
+ .nonNullable()
+ .withType(ColumnTypeEnum.LONG)
+ .heavyweightSkipByDefault()
+ .failureAllowed();
+ version.onTable("TRM_CONCEPT")
+ .modifyColumn("20240601.13", "CODESYSTEM_PID")
+ .nonNullable()
+ .withType(ColumnTypeEnum.LONG)
+ .heavyweightSkipByDefault()
+ .failureAllowed();
+ version.onTable("BT2_JOB_INSTANCE")
+ .modifyColumn("20240601.14", "PROGRESS_PCT")
+ .nonNullable()
+ .withType(ColumnTypeEnum.DOUBLE)
+ .heavyweightSkipByDefault()
+ .failureAllowed();
+ version.onTable("BT2_JOB_INSTANCE")
+ .modifyColumn("20240601.15", "ERROR_COUNT")
+ .nonNullable()
+ .withType(ColumnTypeEnum.INT)
+ .heavyweightSkipByDefault()
+ .failureAllowed();
+ version.onTable("HFJ_BINARY_STORAGE_BLOB")
+ .modifyColumn("20240601.16", "BLOB_SIZE")
+ .nonNullable()
+ .withType(ColumnTypeEnum.LONG)
+ .heavyweightSkipByDefault()
+ .failureAllowed();
+
+ /*
+ * Add RES_ID to two indexes on HFJ_RES_VER which support history operations.
+ * This makes server and type level _history work properly on large databases
+ * on postgres. These are both marked as heavyweightSkipByDefault because the
+ * necessary reindexing would be very expensive for a rarely used FHIR feature.
+ */
+ version.onTable("HFJ_RES_VER")
+ .dropIndex("20240601.17", "IDX_RESVER_TYPE_DATE")
+ .heavyweightSkipByDefault();
+ version.onTable("HFJ_RES_VER")
+ .addIndex("20240601.18", "IDX_RESVER_TYPE_DATE")
+ .unique(false)
+ .withColumns("RES_TYPE", "RES_UPDATED", "RES_ID")
+ .heavyweightSkipByDefault();
+ version.onTable("HFJ_RES_VER")
+ .dropIndex("20240601.19", "IDX_RESVER_DATE")
+ .heavyweightSkipByDefault();
+ version.onTable("HFJ_RES_VER")
+ .addIndex("20240601.20", "IDX_RESVER_DATE")
+ .unique(false)
+ .withColumns("RES_UPDATED", "RES_ID")
+ .heavyweightSkipByDefault();
}
protected void init720() {
@@ -162,15 +277,15 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
binaryStorageBlobTable
.renameColumn("20240404.1", "BLOB_ID", "CONTENT_ID")
.getLastAddedTask()
- .ifPresent(BaseTask::doNothing);
+ .ifPresent(t -> t.addFlag(TaskFlagEnum.DO_NOTHING));
binaryStorageBlobTable
.renameColumn("20240404.2", "BLOB_SIZE", "CONTENT_SIZE")
.getLastAddedTask()
- .ifPresent(BaseTask::doNothing);
+ .ifPresent(t -> t.addFlag(TaskFlagEnum.DO_NOTHING));
binaryStorageBlobTable
.renameColumn("20240404.3", "BLOB_HASH", "CONTENT_HASH")
.getLastAddedTask()
- .ifPresent(BaseTask::doNothing);
+ .ifPresent(t -> t.addFlag(TaskFlagEnum.DO_NOTHING));
binaryStorageBlobTable
.modifyColumn("20240404.4", "BLOB_DATA")
@@ -262,7 +377,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
// Move forced_id constraints to hfj_resource and the new fhir_id column
// Note: we leave the HFJ_FORCED_ID.IDX_FORCEDID_TYPE_FID index in place to support old writers for a while.
- version.addTask(new ForceIdMigrationCopyTask(version.getRelease(), "20231018.1").setDoNothing(true));
+ version.addTask(
+ new ForceIdMigrationCopyTask(version.getRelease(), "20231018.1").addFlag(TaskFlagEnum.DO_NOTHING));
Builder.BuilderWithTableName hfjResource = version.onTable("HFJ_RESOURCE");
// commented out to make numeric space for the fix task below.
@@ -331,7 +447,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
}
// This fix was bad for MSSQL, it has been set to do nothing.
- version.addTask(new ForceIdMigrationFixTask(version.getRelease(), "20231213.1").setDoNothing(true));
+ version.addTask(
+ new ForceIdMigrationFixTask(version.getRelease(), "20231213.1").addFlag(TaskFlagEnum.DO_NOTHING));
// This fix will work for MSSQL or Oracle.
version.addTask(new ForceIdMigrationFixTask(version.getRelease(), "20231222.1"));
@@ -814,8 +931,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
version.onTable("HFJ_RES_VER")
.modifyColumn("20230421.1", "RES_TEXT_VC")
.nullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.TEXT);
+ .withType(ColumnTypeEnum.TEXT)
+ .failureAllowed();
{
// add hash_norm to res_id to speed up joins on a second string.
@@ -1751,8 +1868,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
version.onTable("HFJ_RES_LINK")
.modifyColumn("20210505.1", "SRC_PATH")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 500);
+ .withType(ColumnTypeEnum.STRING, 500)
+ .failureAllowed();
}
private void init530() {
@@ -1813,8 +1930,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
quantityTable
.modifyColumn("20210116.1", "SP_VALUE")
.nullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.DOUBLE);
+ .withType(ColumnTypeEnum.DOUBLE)
+ .failureAllowed();
// HFJ_RES_LINK
version.onTable("HFJ_RES_LINK")
@@ -2011,8 +2128,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
version.onTable("HFJ_RES_VER")
.modifyColumn("20200220.1", "RES_ID")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.LONG);
+ .withType(ColumnTypeEnum.LONG)
+ .failureAllowed();
//
// Drop unused column
@@ -2168,38 +2285,38 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
version.onTable("HFJ_SPIDX_STRING")
.modifyColumn("20200420.36", "SP_MISSING")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.BOOLEAN);
+ .withType(ColumnTypeEnum.BOOLEAN)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_COORDS")
.modifyColumn("20200420.37", "SP_MISSING")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.BOOLEAN);
+ .withType(ColumnTypeEnum.BOOLEAN)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_NUMBER")
.modifyColumn("20200420.38", "SP_MISSING")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.BOOLEAN);
+ .withType(ColumnTypeEnum.BOOLEAN)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_TOKEN")
.modifyColumn("20200420.39", "SP_MISSING")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.BOOLEAN);
+ .withType(ColumnTypeEnum.BOOLEAN)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_DATE")
.modifyColumn("20200420.40", "SP_MISSING")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.BOOLEAN);
+ .withType(ColumnTypeEnum.BOOLEAN)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_URI")
.modifyColumn("20200420.41", "SP_MISSING")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.BOOLEAN);
+ .withType(ColumnTypeEnum.BOOLEAN)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_QUANTITY")
.modifyColumn("20200420.42", "SP_MISSING")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.BOOLEAN);
+ .withType(ColumnTypeEnum.BOOLEAN)
+ .failureAllowed();
// Add support for integer comparisons during day-precision date search.
Builder.BuilderWithTableName spidxDate = version.onTable("HFJ_SPIDX_DATE");
@@ -2309,38 +2426,38 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
version.onTable("HFJ_SPIDX_NUMBER")
.modifyColumn("20190920.1", "RES_ID")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.LONG);
+ .withType(ColumnTypeEnum.LONG)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_COORDS")
.modifyColumn("20190920.2", "RES_ID")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.LONG);
+ .withType(ColumnTypeEnum.LONG)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_TOKEN")
.modifyColumn("20190920.3", "RES_ID")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.LONG);
+ .withType(ColumnTypeEnum.LONG)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_STRING")
.modifyColumn("20190920.4", "RES_ID")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.LONG);
+ .withType(ColumnTypeEnum.LONG)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_DATE")
.modifyColumn("20190920.5", "RES_ID")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.LONG);
+ .withType(ColumnTypeEnum.LONG)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_QUANTITY")
.modifyColumn("20190920.6", "RES_ID")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.LONG);
+ .withType(ColumnTypeEnum.LONG)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_URI")
.modifyColumn("20190920.7", "RES_ID")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.LONG);
+ .withType(ColumnTypeEnum.LONG)
+ .failureAllowed();
// HFJ_SEARCH
version.onTable("HFJ_SEARCH")
@@ -2469,33 +2586,33 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
version.onTable("HFJ_RESOURCE")
.modifyColumn("20191002.1", "RES_TYPE")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 40);
+ .withType(ColumnTypeEnum.STRING, 40)
+ .failureAllowed();
version.onTable("HFJ_RES_VER")
.modifyColumn("20191002.2", "RES_TYPE")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 40);
+ .withType(ColumnTypeEnum.STRING, 40)
+ .failureAllowed();
version.onTable("HFJ_HISTORY_TAG")
.modifyColumn("20191002.3", "RES_TYPE")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 40);
+ .withType(ColumnTypeEnum.STRING, 40)
+ .failureAllowed();
version.onTable("HFJ_RES_LINK")
.modifyColumn("20191002.4", "SOURCE_RESOURCE_TYPE")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 40);
+ .withType(ColumnTypeEnum.STRING, 40)
+ .failureAllowed();
version.onTable("HFJ_RES_LINK")
.modifyColumn("20191002.5", "TARGET_RESOURCE_TYPE")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 40);
+ .withType(ColumnTypeEnum.STRING, 40)
+ .failureAllowed();
version.onTable("HFJ_RES_TAG")
.modifyColumn("20191002.6", "RES_TYPE")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 40);
+ .withType(ColumnTypeEnum.STRING, 40)
+ .failureAllowed();
// TermConceptDesignation
version.startSectionWithMessage("Processing table: TRM_CONCEPT_DESIG");
@@ -2765,18 +2882,18 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
version.onTable("HFJ_SPIDX_COORDS")
.modifyColumn("20190814.9", "RES_TYPE")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 100);
+ .withType(ColumnTypeEnum.STRING, 100)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_DATE")
.modifyColumn("20190814.10", "RES_TYPE")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 100);
+ .withType(ColumnTypeEnum.STRING, 100)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_STRING")
.modifyColumn("20190814.11", "RES_TYPE")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 100);
+ .withType(ColumnTypeEnum.STRING, 100)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_STRING")
.addColumn("20190814.12", "HASH_IDENTITY")
.nullable()
@@ -2788,50 +2905,50 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
version.onTable("HFJ_SPIDX_COORDS")
.modifyColumn("20190814.14", "RES_TYPE")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 100);
+ .withType(ColumnTypeEnum.STRING, 100)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_QUANTITY")
.modifyColumn("20190814.15", "RES_TYPE")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 100);
+ .withType(ColumnTypeEnum.STRING, 100)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("20190814.16", "HASH_UNITS_AND_VALPREFIX");
version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("20190814.17", "HASH_VALPREFIX");
version.onTable("HFJ_SPIDX_NUMBER")
.modifyColumn("20190814.18", "RES_TYPE")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 100);
+ .withType(ColumnTypeEnum.STRING, 100)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_TOKEN")
.modifyColumn("20190814.19", "RES_TYPE")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 100);
+ .withType(ColumnTypeEnum.STRING, 100)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_URI")
.modifyColumn("20190814.20", "RES_TYPE")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 100);
+ .withType(ColumnTypeEnum.STRING, 100)
+ .failureAllowed();
version.onTable("HFJ_SPIDX_URI")
.modifyColumn("20190814.21", "SP_URI")
.nullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 254);
+ .withType(ColumnTypeEnum.STRING, 254)
+ .failureAllowed();
version.onTable("TRM_CODESYSTEM")
.modifyColumn("20190814.22", "CODE_SYSTEM_URI")
.nonNullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 200);
+ .withType(ColumnTypeEnum.STRING, 200)
+ .failureAllowed();
version.onTable("TRM_CODESYSTEM")
.modifyColumn("20190814.23", "CS_NAME")
.nullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 200);
+ .withType(ColumnTypeEnum.STRING, 200)
+ .failureAllowed();
version.onTable("TRM_CODESYSTEM_VER")
.modifyColumn("20190814.24", "CS_VERSION_ID")
.nullable()
- .failureAllowed()
- .withType(ColumnTypeEnum.STRING, 200);
+ .withType(ColumnTypeEnum.STRING, 200)
+ .failureAllowed();
}
private void init360() { // 20180918 - 20181112
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/InstanceReindexServiceImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/InstanceReindexServiceImpl.java
index 8a69d6ef8e0..aeb8b9e31cd 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/InstanceReindexServiceImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/InstanceReindexServiceImpl.java
@@ -524,8 +524,12 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
String theParamTypeName) {
Parameters.ParametersParameterComponent retVal =
super.addIndexValue(theAction, theParent, theParam, theParamTypeName);
- retVal.addPart().setName("Latitude").setValue(new DecimalType(theParam.getLatitude()));
- retVal.addPart().setName("Longitude").setValue(new DecimalType(theParam.getLongitude()));
+ if (theParam.getLatitude() != null) {
+ retVal.addPart().setName("Latitude").setValue(new DecimalType(theParam.getLatitude()));
+ }
+ if (theParam.getLongitude() != null) {
+ retVal.addPart().setName("Longitude").setValue(new DecimalType(theParam.getLongitude()));
+ }
return retVal;
}
}
diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml
index e3ba27996ee..f0f55e9506f 100644
--- a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml
+++ b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-hfql/pom.xml b/hapi-fhir-jpaserver-hfql/pom.xml
index 2ae9ad02678..aaa57b40118 100644
--- a/hapi-fhir-jpaserver-hfql/pom.xml
+++ b/hapi-fhir-jpaserver-hfql/pom.xml
@@ -3,7 +3,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-ips/pom.xml b/hapi-fhir-jpaserver-ips/pom.xml
index 88e32f7e2d6..2aea1bf2f19 100644
--- a/hapi-fhir-jpaserver-ips/pom.xml
+++ b/hapi-fhir-jpaserver-ips/pom.xml
@@ -3,7 +3,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml
index 669f52ea6ba..7a7c9f7ab64 100644
--- a/hapi-fhir-jpaserver-mdm/pom.xml
+++ b/hapi-fhir-jpaserver-mdm/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml
index 1df82dac9f4..c453a3c30ab 100644
--- a/hapi-fhir-jpaserver-model/pom.xml
+++ b/hapi-fhir-jpaserver-model/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BinaryStorageEntity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BinaryStorageEntity.java
index c6b046e0bff..f6d77040763 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BinaryStorageEntity.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BinaryStorageEntity.java
@@ -45,7 +45,7 @@ public class BinaryStorageEntity {
@Column(name = "RESOURCE_ID", length = 100, nullable = false)
private String myResourceId;
- @Column(name = "BLOB_SIZE", nullable = true)
+ @Column(name = "BLOB_SIZE", nullable = false)
private long mySize;
@Column(name = "CONTENT_TYPE", nullable = false, length = 100)
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java
index 6348ce579b3..8ef37d8cdca 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java
@@ -60,9 +60,9 @@ import java.util.Collection;
columnNames = {"RES_ID", "RES_VER"})
},
indexes = {
- @Index(name = "IDX_RESVER_TYPE_DATE", columnList = "RES_TYPE,RES_UPDATED"),
+ @Index(name = "IDX_RESVER_TYPE_DATE", columnList = "RES_TYPE,RES_UPDATED,RES_ID"),
@Index(name = "IDX_RESVER_ID_DATE", columnList = "RES_ID,RES_UPDATED"),
- @Index(name = "IDX_RESVER_DATE", columnList = "RES_UPDATED")
+ @Index(name = "IDX_RESVER_DATE", columnList = "RES_UPDATED,RES_ID")
})
public class ResourceHistoryTable extends BaseHasResource implements Serializable {
public static final String IDX_RESVER_ID_VER = "IDX_RESVER_ID_VER";
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedComboStringUnique.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedComboStringUnique.java
index 830fc6270aa..e61941cb611 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedComboStringUnique.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedComboStringUnique.java
@@ -135,12 +135,14 @@ public class ResourceIndexedComboStringUnique extends BasePartitionable
myIndexString = theIndexString;
}
+ @Override
public ResourceTable getResource() {
return myResource;
}
+ @Override
public void setResource(ResourceTable theResource) {
- Validate.notNull(theResource);
+ Validate.notNull(theResource, "theResource must not be null");
myResource = theResource;
}
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoords.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoords.java
index 40718b09df8..a66e5f6f564 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoords.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoords.java
@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.model.api.IQueryParameterType;
+import jakarta.annotation.Nullable;
import jakarta.persistence.Column;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Entity;
@@ -56,13 +57,11 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
private static final long serialVersionUID = 1L;
- @Column(name = "SP_LATITUDE")
- // @FullTextField
- public double myLatitude;
+ @Column(name = "SP_LATITUDE", nullable = true)
+ public Double myLatitude;
- @Column(name = "SP_LONGITUDE")
- // @FullTextField
- public double myLongitude;
+ @Column(name = "SP_LONGITUDE", nullable = true)
+ public Double myLongitude;
@Id
@SequenceGenerator(name = "SEQ_SPIDX_COORDS", sequenceName = "SEQ_SPIDX_COORDS")
@@ -162,7 +161,8 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
myId = theId;
}
- public double getLatitude() {
+ @Nullable
+ public Double getLatitude() {
return myLatitude;
}
@@ -171,7 +171,8 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
return this;
}
- public double getLongitude() {
+ @Nullable
+ public Double getLongitude() {
return myLongitude;
}
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java
index 39f85198aec..e897ae71786 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java
@@ -142,7 +142,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
@OptimisticLock(excluded = true)
private String myHashSha256;
- @Column(name = "SP_HAS_LINKS")
+ @Column(name = "SP_HAS_LINKS", nullable = false)
@OptimisticLock(excluded = true)
private boolean myHasLinks;
@@ -203,7 +203,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
@OptimisticLock(excluded = true)
private Collection myParamsCoords;
- @Column(name = "SP_COORDS_PRESENT")
+ @Column(name = "SP_COORDS_PRESENT", nullable = false)
@OptimisticLock(excluded = true)
private boolean myParamsCoordsPopulated;
@@ -215,7 +215,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
@OptimisticLock(excluded = true)
private Collection myParamsDate;
- @Column(name = "SP_DATE_PRESENT")
+ @Column(name = "SP_DATE_PRESENT", nullable = false)
@OptimisticLock(excluded = true)
private boolean myParamsDatePopulated;
@@ -227,7 +227,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
orphanRemoval = false)
private Collection myParamsNumber;
- @Column(name = "SP_NUMBER_PRESENT")
+ @Column(name = "SP_NUMBER_PRESENT", nullable = false)
@OptimisticLock(excluded = true)
private boolean myParamsNumberPopulated;
@@ -239,7 +239,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
@OptimisticLock(excluded = true)
private Collection myParamsQuantity;
- @Column(name = "SP_QUANTITY_PRESENT")
+ @Column(name = "SP_QUANTITY_PRESENT", nullable = false)
@OptimisticLock(excluded = true)
private boolean myParamsQuantityPopulated;
@@ -260,7 +260,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
* NOTE : use Boolean class instead of boolean primitive, in order to set the existing rows to null
* since 5.3.0
*/
- @Column(name = "SP_QUANTITY_NRML_PRESENT")
+ @Column(name = "SP_QUANTITY_NRML_PRESENT", nullable = false)
@OptimisticLock(excluded = true)
private Boolean myParamsQuantityNormalizedPopulated = Boolean.FALSE;
@@ -272,7 +272,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
@OptimisticLock(excluded = true)
private Collection myParamsString;
- @Column(name = "SP_STRING_PRESENT")
+ @Column(name = "SP_STRING_PRESENT", nullable = false)
@OptimisticLock(excluded = true)
private boolean myParamsStringPopulated;
@@ -284,7 +284,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
@OptimisticLock(excluded = true)
private Collection myParamsToken;
- @Column(name = "SP_TOKEN_PRESENT")
+ @Column(name = "SP_TOKEN_PRESENT", nullable = false)
@OptimisticLock(excluded = true)
private boolean myParamsTokenPopulated;
@@ -296,7 +296,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
@OptimisticLock(excluded = true)
private Collection myParamsUri;
- @Column(name = "SP_URI_PRESENT")
+ @Column(name = "SP_URI_PRESENT", nullable = false)
@OptimisticLock(excluded = true)
private boolean myParamsUriPopulated;
@@ -404,7 +404,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
private Boolean mySearchUrlPresent = false;
@Version
- @Column(name = "RES_VER")
+ @Column(name = "RES_VER", nullable = false)
private long myVersion;
@OneToMany(mappedBy = "myResourceTable", fetch = FetchType.LAZY)
diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml
index 675432baedc..7199a9ad0bc 100755
--- a/hapi-fhir-jpaserver-searchparam/pom.xml
+++ b/hapi-fhir-jpaserver-searchparam/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml
index 928e4754d88..3840df8a715 100644
--- a/hapi-fhir-jpaserver-subscription/pom.xml
+++ b/hapi-fhir-jpaserver-subscription/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-dstu2/pom.xml b/hapi-fhir-jpaserver-test-dstu2/pom.xml
index 6d526c2967a..ed44b54b06d 100644
--- a/hapi-fhir-jpaserver-test-dstu2/pom.xml
+++ b/hapi-fhir-jpaserver-test-dstu2/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-dstu3/pom.xml b/hapi-fhir-jpaserver-test-dstu3/pom.xml
index 75747b68a7f..ebf988b81f9 100644
--- a/hapi-fhir-jpaserver-test-dstu3/pom.xml
+++ b/hapi-fhir-jpaserver-test-dstu3/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-r4/pom.xml b/hapi-fhir-jpaserver-test-r4/pom.xml
index 9ac14b62183..a05fcda4c4b 100644
--- a/hapi-fhir-jpaserver-test-r4/pom.xml
+++ b/hapi-fhir-jpaserver-test-r4/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-r4b/pom.xml b/hapi-fhir-jpaserver-test-r4b/pom.xml
index 75d284dddda..5ad8360e422 100644
--- a/hapi-fhir-jpaserver-test-r4b/pom.xml
+++ b/hapi-fhir-jpaserver-test-r4b/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-r5/pom.xml b/hapi-fhir-jpaserver-test-r5/pom.xml
index d8ed3a11ea3..0585c6f87aa 100644
--- a/hapi-fhir-jpaserver-test-r5/pom.xml
+++ b/hapi-fhir-jpaserver-test-r5/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml
index 323a1e00051..ea643589d1b 100644
--- a/hapi-fhir-jpaserver-test-utilities/pom.xml
+++ b/hapi-fhir-jpaserver-test-utilities/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
index da796517f5b..174a429705a 100644
--- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
+++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-fhir
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../pom.xml
diff --git a/hapi-fhir-server-cds-hooks/pom.xml b/hapi-fhir-server-cds-hooks/pom.xml
index 3bef5a5c5df..adbe430ad0f 100644
--- a/hapi-fhir-server-cds-hooks/pom.xml
+++ b/hapi-fhir-server-cds-hooks/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml
index bddca577b5b..b7e48476ad6 100644
--- a/hapi-fhir-server-mdm/pom.xml
+++ b/hapi-fhir-server-mdm/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-server-openapi/pom.xml b/hapi-fhir-server-openapi/pom.xml
index c1f03e6eca8..c9f0f0a14c7 100644
--- a/hapi-fhir-server-openapi/pom.xml
+++ b/hapi-fhir-server-openapi/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml
index bc844b33a29..84c16998ebc 100644
--- a/hapi-fhir-server/pom.xml
+++ b/hapi-fhir-server/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml
index a4eb90d2992..288431d981c 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir-serviceloadersca.uhn.hapi.fhir
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../pom.xml
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml
index 614cfc967b7..46a455edf0a 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir-serviceloadersca.uhn.hapi.fhir
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../pom.xml
@@ -21,7 +21,7 @@
ca.uhn.hapi.fhirhapi-fhir-caching-api
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml
index 322eb544e6c..1629077b0fd 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir-serviceloadersca.uhn.hapi.fhir
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../pom.xml
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml
index 782403fec87..d805e44028c 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml
@@ -7,7 +7,7 @@
hapi-fhirca.uhn.hapi.fhir
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../../pom.xml
diff --git a/hapi-fhir-serviceloaders/pom.xml b/hapi-fhir-serviceloaders/pom.xml
index 9a56ecfd799..87384aa0ef1 100644
--- a/hapi-fhir-serviceloaders/pom.xml
+++ b/hapi-fhir-serviceloaders/pom.xml
@@ -5,7 +5,7 @@
hapi-deployable-pomca.uhn.hapi.fhir
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml
index 3e2354e4901..e49c57173a6 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml
index 12079ac6303..b042bb09416 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-fhir-spring-boot-samples
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOThapi-fhir-spring-boot-sample-client-apache
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml
index f7631efb99b..0d55565aa40 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-fhir-spring-boot-samples
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml
index e7be98afcd8..9776f578481 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-fhir-spring-boot-samples
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml
index 3172d609d8a..b83b51ba012 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-fhir-spring-boot
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml
index ff62090b5d1..9c63563684f 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml
index afb870730a1..aa18dfb90ff 100644
--- a/hapi-fhir-spring-boot/pom.xml
+++ b/hapi-fhir-spring-boot/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-fhir
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../pom.xml
diff --git a/hapi-fhir-sql-migrate/pom.xml b/hapi-fhir-sql-migrate/pom.xml
index ce8c9c06fe6..a7cc58ab8fe 100644
--- a/hapi-fhir-sql-migrate/pom.xml
+++ b/hapi-fhir-sql-migrate/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.3.7-SNAPSHOT
+ 7.3.8-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrator.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrator.java
index dd8afdce598..17ee41c8032 100644
--- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrator.java
+++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrator.java
@@ -23,6 +23,7 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
import ca.uhn.fhir.jpa.migrate.taskdef.InitializeSchemaTask;
+import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
import ca.uhn.fhir.system.HapiSystemProperties;
import ca.uhn.fhir.util.StopWatch;
import com.google.common.annotations.VisibleForTesting;
@@ -44,6 +45,7 @@ public class HapiMigrator {
private static final Logger ourLog = LoggerFactory.getLogger(HapiMigrator.class);
private final MigrationTaskList myTaskList = new MigrationTaskList();
private boolean myDryRun;
+ private boolean myRunHeavyweightSkippableTasks;
private boolean myNoColumnShrink;
private final DriverTypeEnum myDriverType;
private final DataSource myDataSource;
@@ -69,6 +71,24 @@ public class HapiMigrator {
myDryRun = theDryRun;
}
+ /**
+	 * Should we run the tasks marked with {@link ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum#HEAVYWEIGHT_SKIP_BY_DEFAULT}?
+ *
+ * @since 7.4.0
+ */
+ public boolean isRunHeavyweightSkippableTasks() {
+ return myRunHeavyweightSkippableTasks;
+ }
+
+ /**
+	 * Should we run the tasks marked with {@link ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum#HEAVYWEIGHT_SKIP_BY_DEFAULT}?
+ *
+ * @since 7.4.0
+ */
+ public void setRunHeavyweightSkippableTasks(boolean theRunHeavyweightSkippableTasks) {
+ myRunHeavyweightSkippableTasks = theRunHeavyweightSkippableTasks;
+ }
+
public boolean isNoColumnShrink() {
return myNoColumnShrink;
}
@@ -131,14 +151,27 @@ public class HapiMigrator {
try (DriverTypeEnum.ConnectionProperties connectionProperties =
getDriverType().newConnectionProperties(getDataSource())) {
- newTaskList.forEach(next -> {
+ if (!isRunHeavyweightSkippableTasks()) {
+ newTaskList.removeIf(BaseTask::isHeavyweightSkippableTask);
+ }
+
+ boolean initializedSchema = false;
+ for (BaseTask next : newTaskList) {
+ if (initializedSchema && !next.hasFlag(TaskFlagEnum.RUN_DURING_SCHEMA_INITIALIZATION)) {
+ ourLog.info("Skipping task {} because schema is being initialized", next.getMigrationVersion());
+ recordTaskAsCompletedIfNotDryRun(next, 0L, true);
+ continue;
+ }
+
next.setDriverType(getDriverType());
next.setDryRun(isDryRun());
next.setNoColumnShrink(isNoColumnShrink());
next.setConnectionProperties(connectionProperties);
executeTask(next, retval);
- });
+
+ initializedSchema |= next.initializedSchema();
+ }
}
} catch (Exception e) {
ourLog.error("Migration failed", e);
@@ -167,13 +200,13 @@ public class HapiMigrator {
}
preExecute(theTask);
theTask.execute();
- postExecute(theTask, sw, true);
+ recordTaskAsCompletedIfNotDryRun(theTask, sw.getMillis(), true);
theMigrationResult.changes += theTask.getChangesCount();
theMigrationResult.executedStatements.addAll(theTask.getExecutedStatements());
theMigrationResult.succeededTasks.add(theTask);
} catch (SQLException | HapiMigrationException e) {
theMigrationResult.failedTasks.add(theTask);
- postExecute(theTask, sw, false);
+ recordTaskAsCompletedIfNotDryRun(theTask, sw.getMillis(), false);
String description = theTask.getDescription();
if (isBlank(description)) {
description = theTask.getClass().getSimpleName();
@@ -187,9 +220,9 @@ public class HapiMigrator {
myCallbacks.forEach(action -> action.preExecution(theTask));
}
- private void postExecute(BaseTask theNext, StopWatch theStopWatch, boolean theSuccess) {
+ private void recordTaskAsCompletedIfNotDryRun(BaseTask theNext, long theExecutionMillis, boolean theSuccess) {
if (!theNext.isDryRun()) {
- myHapiMigrationStorageSvc.saveTask(theNext, Math.toIntExact(theStopWatch.getMillis()), theSuccess);
+ myHapiMigrationStorageSvc.saveTask(theNext, Math.toIntExact(theExecutionMillis), theSuccess);
}
}
@@ -211,7 +244,7 @@ public class HapiMigrator {
}
public void setCallbacks(@Nonnull List theCallbacks) {
- Validate.notNull(theCallbacks);
+ Validate.notNull(theCallbacks, "theCallbacks must not be null");
myCallbacks = theCallbacks;
}
diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationTaskList.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationTaskList.java
index 668b51095c9..17a56f5a48b 100644
--- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationTaskList.java
+++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationTaskList.java
@@ -29,6 +29,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.function.Consumer;
+import java.util.function.Predicate;
import java.util.stream.Collectors;
public class MigrationTaskList implements Iterable {
@@ -95,4 +96,12 @@ public class MigrationTaskList implements Iterable {
.reduce((first, second) -> second)
.orElse(null);
}
+
+ public void removeIf(Predicate theFilter) {
+ myTasks.removeIf(theFilter);
+ }
+
+ public BaseTask[] toTaskArray() {
+ return myTasks.toArray(new BaseTask[0]);
+ }
}
diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationTaskSkipper.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationTaskSkipper.java
index 0e74e504380..6de5b05fe3d 100644
--- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationTaskSkipper.java
+++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationTaskSkipper.java
@@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.migrate;
import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask;
+import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -49,7 +50,7 @@ public class MigrationTaskSkipper {
for (BaseTask task : theTasks) {
if (skippedVersionSet.contains(task.getMigrationVersion())) {
ourLog.info("Will skip {}: {}", task.getMigrationVersion(), task.getDescription());
- task.setDoNothing(true);
+ task.addFlag(TaskFlagEnum.DO_NOTHING);
}
}
}
diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTask.java
index d1e6ed52064..933b81d3e8a 100644
--- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTask.java
+++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTask.java
@@ -35,12 +35,6 @@ public class AddColumnTask extends BaseTableColumnTypeTask {
return new AddColumnTask(null, null, ColumnNameCase.ALL_LOWER, theColumnDriverMappingOverrides);
}
- public AddColumnTask() {
- this(null, null);
- setDryRun(true);
- myCheckForExistingTables = false;
- }
-
public AddColumnTask(String theProductVersion, String theSchemaVersion) {
super(theProductVersion, theSchemaVersion);
}
@@ -84,6 +78,7 @@ public class AddColumnTask extends BaseTableColumnTypeTask {
break;
case DERBY_EMBEDDED:
case POSTGRES_9_4:
+ case COCKROACHDB_21_1:
sql = "alter table " + getTableName() + " add column " + getColumnName() + " " + typeStatement;
break;
case MSSQL_2012:
diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddTableByColumnTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddTableByColumnTask.java
index abf38c1672a..969a05b21c0 100644
--- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddTableByColumnTask.java
+++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddTableByColumnTask.java
@@ -56,7 +56,7 @@ public class AddTableByColumnTask extends BaseTableTask {
this(theProductVersion, theSchemaVersion, null);
}
- private AddTableByColumnTask(
+ public AddTableByColumnTask(
String theProductVersion, String theSchemaVersion, Comparator theColumnSortingRules) {
super(theProductVersion, theSchemaVersion);
myColumnSortingRules = theColumnSortingRules;
diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTask.java
index e3b9459fe06..41dedf315a7 100644
--- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTask.java
+++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTask.java
@@ -46,6 +46,9 @@ public class ArbitrarySqlTask extends BaseTask {
private String myExecuteOnlyIfTableExists;
private List myConditionalOnExistenceOf = new ArrayList<>();
+ /**
+ * Constructor
+ */
public ArbitrarySqlTask(VersionEnum theRelease, String theVersion, String theTableName, String theDescription) {
super(theRelease.toString(), theVersion);
myTableName = theTableName;
diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java
index 50b7859a0a9..f652fa0a938 100644
--- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java
+++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java
@@ -22,10 +22,14 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.HapiMigrationException;
+import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
import ca.uhn.fhir.system.HapiSystemProperties;
+import jakarta.annotation.Nonnull;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.ToStringBuilder;
+import org.apache.commons.lang3.builder.ToStringStyle;
import org.flywaydb.core.api.MigrationVersion;
import org.intellij.lang.annotations.Language;
import org.slf4j.Logger;
@@ -38,6 +42,7 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
+import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -51,11 +56,42 @@ public abstract class BaseTask {
private static final Pattern versionPattern = Pattern.compile(MIGRATION_VERSION_PATTERN);
private final String myProductVersion;
private final String mySchemaVersion;
+ private final List myPreconditions = new ArrayList<>();
+ private final EnumSet myFlags = EnumSet.noneOf(TaskFlagEnum.class);
+ private final List myExecutedStatements = new ArrayList<>();
+ /**
+ * Whether to check for existing tables
+ * before generating SQL
+ */
+ protected boolean myCheckForExistingTables = true;
+ /**
+ * Whether to generate the SQL in a 'readable format'
+ */
+ protected boolean myPrettyPrint = false;
+
private DriverTypeEnum.ConnectionProperties myConnectionProperties;
private DriverTypeEnum myDriverType;
private String myDescription;
private Integer myChangesCount = 0;
private boolean myDryRun;
+ private boolean myTransactional = true;
+ private Set myOnlyAppliesToPlatforms = new HashSet<>();
+ private boolean myNoColumnShrink;
+
+ protected BaseTask(String theProductVersion, String theSchemaVersion) {
+ myProductVersion = theProductVersion;
+ mySchemaVersion = theSchemaVersion;
+ }
+
+ /**
+	 * Adds a flag if it's not already present; otherwise this call is ignored.
+ *
+ * @param theFlag The flag, must not be null
+ */
+ public BaseTask addFlag(@Nonnull TaskFlagEnum theFlag) {
+ myFlags.add(theFlag);
+ return this;
+ }
/**
* Some migrations can not be run in a transaction.
@@ -65,48 +101,12 @@ public abstract class BaseTask {
myTransactional = theTransactional;
}
- private boolean myTransactional = true;
- private boolean myDoNothing;
- private List myExecutedStatements = new ArrayList<>();
- private Set myOnlyAppliesToPlatforms = new HashSet<>();
- private boolean myNoColumnShrink;
- private boolean myFailureAllowed;
- private boolean myRunDuringSchemaInitialization;
- /**
- * Whether or not to check for existing tables
- * before generating SQL
- */
- protected boolean myCheckForExistingTables = true;
-
- /**
- * Whether or not to generate the SQL in a 'readable format'
- */
- protected boolean myPrettyPrint = false;
-
- protected BaseTask(String theProductVersion, String theSchemaVersion) {
- myProductVersion = theProductVersion;
- mySchemaVersion = theSchemaVersion;
- }
-
- public boolean isRunDuringSchemaInitialization() {
- return myRunDuringSchemaInitialization;
- }
-
public void setPrettyPrint(boolean thePrettyPrint) {
myPrettyPrint = thePrettyPrint;
}
- /**
- * Should this task run even if we're doing the very first initialization of an empty schema. By
- * default we skip most tasks during that pass, since they just take up time and the
- * schema should be fully initialized by the {@link InitializeSchemaTask}
- */
- public void setRunDuringSchemaInitialization(boolean theRunDuringSchemaInitialization) {
- myRunDuringSchemaInitialization = theRunDuringSchemaInitialization;
- }
-
public void setOnlyAppliesToPlatforms(Set theOnlyAppliesToPlatforms) {
- Validate.notNull(theOnlyAppliesToPlatforms);
+ Validate.notNull(theOnlyAppliesToPlatforms, "theOnlyAppliesToPlatforms must not be null");
myOnlyAppliesToPlatforms = theOnlyAppliesToPlatforms;
}
@@ -188,7 +188,7 @@ public abstract class BaseTask {
private Integer doExecuteSqlList(List theSqlStatements) {
int changesCount = 0;
- for (String nextSql : theSqlStatements) {
+ for (@Language("SQL") String nextSql : theSqlStatements) {
changesCount += doExecuteSql(nextSql);
}
@@ -206,7 +206,7 @@ public abstract class BaseTask {
}
return changesCount;
} catch (DataAccessException e) {
- if (myFailureAllowed) {
+ if (myFlags.contains(TaskFlagEnum.FAILURE_ALLOWED)) {
ourLog.info("Task {} did not exit successfully, but task is allowed to fail", getMigrationVersion());
ourLog.debug("Error was: {}", e.getMessage(), e);
return 0;
@@ -219,7 +219,7 @@ public abstract class BaseTask {
protected void captureExecutedStatement(
String theTableName, @Language("SQL") String theSql, Object... theArguments) {
- myExecutedStatements.add(new ExecutedStatement(theTableName, theSql, theArguments));
+ myExecutedStatements.add(new ExecutedStatement(mySchemaVersion, theTableName, theSql, theArguments));
}
public DriverTypeEnum.ConnectionProperties getConnectionProperties() {
@@ -250,10 +250,8 @@ public abstract class BaseTask {
return getConnectionProperties().newJdbcTemplate();
}
- private final List myPreconditions = new ArrayList<>();
-
public void execute() throws SQLException {
- if (myDoNothing) {
+ if (myFlags.contains(TaskFlagEnum.DO_NOTHING)) {
ourLog.info("Skipping stubbed task: {}", getDescription());
return;
}
@@ -278,14 +276,6 @@ public abstract class BaseTask {
protected abstract void doExecute() throws SQLException;
- protected boolean isFailureAllowed() {
- return myFailureAllowed;
- }
-
- public void setFailureAllowed(boolean theFailureAllowed) {
- myFailureAllowed = theFailureAllowed;
- }
-
public String getMigrationVersion() {
String releasePart = myProductVersion;
if (releasePart.startsWith("V")) {
@@ -296,6 +286,7 @@ public abstract class BaseTask {
return migrationVersion.getVersion();
}
+ @SuppressWarnings("StringConcatenationArgumentToLogCall")
protected void logInfo(Logger theLog, String theFormattedMessage, Object... theArguments) {
theLog.info(getMigrationVersion() + ": " + theFormattedMessage, theArguments);
}
@@ -308,23 +299,6 @@ public abstract class BaseTask {
}
}
- public void doNothing() {
- setDoNothing(true);
- }
-
- public void failureAllowed() {
- setFailureAllowed(true);
- }
-
- public boolean isDoNothing() {
- return myDoNothing;
- }
-
- public BaseTask setDoNothing(boolean theDoNothing) {
- myDoNothing = theDoNothing;
- return this;
- }
-
public void addPrecondition(ExecuteTaskPrecondition thePrecondition) {
myPreconditions.add(thePrecondition);
}
@@ -343,7 +317,6 @@ public abstract class BaseTask {
if (theObject == null || getClass().equals(theObject.getClass()) == false) {
return false;
}
- @SuppressWarnings("unchecked")
BaseTask otherObject = (BaseTask) theObject;
EqualsBuilder b = new EqualsBuilder();
@@ -357,17 +330,35 @@ public abstract class BaseTask {
return false;
}
+ public boolean isDoNothing() {
+ return myFlags.contains(TaskFlagEnum.DO_NOTHING);
+ }
+
+ public boolean isHeavyweightSkippableTask() {
+ return myFlags.contains(TaskFlagEnum.HEAVYWEIGHT_SKIP_BY_DEFAULT);
+ }
+
+ public boolean hasFlag(TaskFlagEnum theFlag) {
+ return myFlags.contains(theFlag);
+ }
+
public static class ExecutedStatement {
private final String mySql;
private final List
+
org.slf4j
diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/HasGetterOrSetterForAllJsonFields.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/HasGetterOrSetterForAllJsonFields.java
deleted file mode 100644
index ece18cc6af5..00000000000
--- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/HasGetterOrSetterForAllJsonFields.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*-
- * #%L
- * HAPI FHIR Test Utilities
- * %%
- * Copyright (C) 2014 - 2024 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-package ca.uhn.test.util;
-
-import ca.uhn.fhir.model.api.IModelJson;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import io.swagger.v3.oas.annotations.media.Schema;
-import jakarta.annotation.Nonnull;
-import org.hamcrest.Description;
-import org.hamcrest.Matcher;
-import org.hamcrest.TypeSafeMatcher;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.beans.BeanInfo;
-import java.beans.FeatureDescriptor;
-import java.beans.IntrospectionException;
-import java.beans.Introspector;
-import java.lang.reflect.Field;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-
-import static org.hamcrest.Matchers.hasItems;
-
-@Deprecated
-/**
- * @deprecated convert usages to HasGetterOrSetterForAllJsonFieldsAssert
- */
-public class HasGetterOrSetterForAllJsonFields extends TypeSafeMatcher> {
- private static final Logger ourLog = LoggerFactory.getLogger(HasGetterOrSetterForAllJsonFields.class);
-
- @Override
- public void describeTo(Description description) {
- description.appendText("All @JsonProperty annotated fields have getters and setters.");
- }
-
- @Override
- protected boolean matchesSafely(Class extends IModelJson> item) {
- List jsonPropertyFields = getJsonPropertyFields(item);
- Matcher> matcher = hasItems(jsonPropertyFields.toArray());
- List properties = getProperties(item);
- ourLog.info("{}: testing {} @JsonProperty fields", item.getSimpleName(), jsonPropertyFields.size());
- return matcher.matches(properties);
- }
-
- @Nonnull
- private List getJsonPropertyFields(Class extends IModelJson> item) {
- List fields = new ArrayList<>();
-
- populateFields(fields, item);
-
- return fields.stream()
- .filter(this::isJsonProperty)
- .filter(this::isNotCollection)
- .filter(this::isNotMap)
- .map(Field::getName)
- .map(this::stripPrefix)
- .map(this::stripUnderscoreSuffix)
- .sorted()
- .collect(Collectors.toList());
- }
-
- private boolean isNotCollection(Field theField) {
- return !Collection.class.isAssignableFrom(theField.getType());
- }
-
- private boolean isNotMap(Field theField) {
- return !Map.class.isAssignableFrom(theField.getType());
- }
-
- private boolean isJsonProperty(Field theField) {
- if (!theField.isAnnotationPresent(JsonProperty.class)) {
- return false;
- }
- Schema apiModelProperty = theField.getAnnotation(Schema.class);
- if (apiModelProperty != null && apiModelProperty.accessMode() == Schema.AccessMode.READ_ONLY) {
- return false;
- }
- return apiModelProperty == null || !apiModelProperty.hidden();
- }
-
- private String stripPrefix(String theFieldName) {
- if (theFieldName.startsWith("my")) {
- return theFieldName.substring(2, 3).toLowerCase() + theFieldName.substring(3);
- }
- return theFieldName;
- }
-
- private String stripUnderscoreSuffix(String theFieldName) {
- if (theFieldName.endsWith("_")) {
- return theFieldName.substring(0, theFieldName.length() - 1);
- }
- return theFieldName;
- }
-
- @Override
- protected void describeMismatchSafely(Class extends IModelJson> item, Description mismatchDescription) {
- mismatchDescription.appendText(" for class ").appendText(item.getName()).appendText(", ");
- List jsonFields = getJsonPropertyFields(item);
- Matcher> matcher = hasItems(jsonFields.toArray());
- List properties = getProperties(item);
- matcher.describeMismatch(properties, mismatchDescription);
- mismatchDescription.appendText("\n All non-collection @JsonProperty fields: " + String.join(", ", jsonFields));
- mismatchDescription.appendText("\n Have get/set methods for: " + String.join(", ", properties));
- }
-
- private List getProperties(Class extends IModelJson> item) {
- try {
- BeanInfo beanInfo = Introspector.getBeanInfo(item);
- return Arrays.stream(beanInfo.getPropertyDescriptors())
- .map(FeatureDescriptor::getName)
- .filter(name -> !"class".equals(name))
- .map(this::lowerCaseFirstLetter)
- .sorted()
- .collect(Collectors.toList());
- } catch (IntrospectionException e) {
- throw new AssertionError("Unable to introspect " + item.getName(), e);
- }
- }
-
- private String lowerCaseFirstLetter(String thePropertyName) {
- return thePropertyName.substring(0, 1).toLowerCase() + thePropertyName.substring(1);
- }
-
- private static void populateFields(List theFields, Class> theItem) {
- theFields.addAll(Arrays.asList(theItem.getDeclaredFields()));
-
- if (theItem.getSuperclass() != null) {
- populateFields(theFields, theItem.getSuperclass());
- }
- }
-
- public static HasGetterOrSetterForAllJsonFields hasGetterOrSetterForAllJsonFields() {
- return new HasGetterOrSetterForAllJsonFields();
- }
-}
diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/LogEventAssert.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/LogEventAssert.java
index 3e080895124..6dd8bdfce7c 100644
--- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/LogEventAssert.java
+++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/LogEventAssert.java
@@ -27,7 +27,4 @@ public class LogEventAssert extends AbstractAssert myListAppender = null;
- private Level mySavedLevel;
-
- /**
- *
- * @param theLogger the log to capture
- */
- public LogbackCaptureTestExtension(Logger theLogger) {
- myLogger = theLogger;
- myLevel = null;
- }
-
- /**
- *
- * @param theLogger the log to capture
- * @param theTestLogLevel the log Level to set on the target logger for the duration of the test
- */
- public LogbackCaptureTestExtension(Logger theLogger, Level theTestLogLevel) {
- myLogger = theLogger;
- myLevel = theTestLogLevel;
- }
-
- /**
- * @param theLoggerName the log name to capture
- */
- public LogbackCaptureTestExtension(String theLoggerName) {
- this((Logger) LoggerFactory.getLogger(theLoggerName));
- }
-
- /**
- * Capture the root logger - all lines.
- */
- public LogbackCaptureTestExtension() {
- this(org.slf4j.Logger.ROOT_LOGGER_NAME);
- }
-
- public LogbackCaptureTestExtension(String theLoggerName, Level theLevel) {
- this((Logger) LoggerFactory.getLogger(theLoggerName), theLevel);
- }
-
- public LogbackCaptureTestExtension(Class> theClass) {
- this(theClass.getName());
- }
-
- public LogbackCaptureTestExtension(Class> theClass, Level theLevel) {
- this(theClass.getName(), theLevel);
- }
-
- public LogbackCaptureTestExtension(org.slf4j.Logger theLogger) {
- this((Logger) theLogger);
- }
-
- /**
- * Returns a copy to avoid concurrent modification errors.
- * @return A copy of the log events so far.
- */
- public java.util.List getLogEvents() {
- // copy to avoid concurrent mod errors
- return new ArrayList<>(myListAppender.list);
- }
-
- /** Clear accumulated log events. */
- public void clearEvents() {
- myListAppender.list.clear();
- }
-
- public ListAppender getAppender() {
- return myListAppender;
- }
-
- @Override
- public void beforeEach(ExtensionContext context) throws Exception {
- setUp();
- }
-
- /**
- * Guts of beforeEach exposed for manual lifecycle.
- */
- public void setUp() {
- setUp(myLevel);
- }
-
- /**
- * Guts of beforeEach exposed for manual lifecycle.
- */
- public void setUp(Level theLevel) {
- myListAppender = new ListAppender<>();
- myListAppender.start();
- myLogger.addAppender(myListAppender);
- if (theLevel != null) {
- mySavedLevel = myLogger.getLevel();
- myLogger.setLevel(theLevel);
- }
- }
-
- @Override
- public void afterEach(ExtensionContext context) throws Exception {
- myLogger.detachAppender(myListAppender);
- myListAppender.stop();
- if (myLevel != null) {
- myLogger.setLevel(mySavedLevel);
- }
- }
-
-
- public List filterLoggingEventsWithMessageEqualTo(String theMessageText){
- return filterLoggingEventsWithPredicate(loggingEvent -> loggingEvent.getFormattedMessage().equals(theMessageText));
- }
-
- public List filterLoggingEventsWithMessageContaining(String theMessageText){
- return filterLoggingEventsWithPredicate(loggingEvent -> loggingEvent.getFormattedMessage().contains(theMessageText));
- }
-
- public List filterLoggingEventsWithPredicate(Predicate theLoggingEventPredicate){
- return getLogEvents()
- .stream()
- .filter(theLoggingEventPredicate)
- .collect(Collectors.toList());
- }
-
- /**
- * Extract the log messages from the logging events.
- * @return a copy of the List of log messages
- *
- */
- @Nonnull
- public List getLogMessages() {
- return getLogEvents().stream().map(ILoggingEvent::getMessage).collect(Collectors.toList());
- }
-
- // Hamcrest matcher support
- public static Matcher eventWithLevelAndMessageContains(@Nonnull Level theLevel, @Nonnull String thePartialMessage) {
- return new LogbackEventMatcher(theLevel, thePartialMessage);
- }
-
- public static Matcher eventWithLevel(@Nonnull Level theLevel) {
- return new LogbackEventMatcher(theLevel, null);
- }
-
- public static Matcher eventWithMessageContains(@Nonnull String thePartialMessage) {
- return new LogbackEventMatcher(null, thePartialMessage);
- }
-
- public static Matcher eventWithLevelAndMessageAndThrew(@Nonnull Level theLevel,
- @Nonnull String thePartialMessage,
- @Nonnull String theThrown)
- {
- return new LogbackEventMatcher(theLevel, thePartialMessage, theThrown);
- }
-
-}
diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/LogbackEventMatcher.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/LogbackEventMatcher.java
deleted file mode 100644
index 44af2e850d1..00000000000
--- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/LogbackEventMatcher.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*-
- * #%L
- * HAPI FHIR Test Utilities
- * %%
- * Copyright (C) 2014 - 2024 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-package ca.uhn.test.util;
-
-import ch.qos.logback.classic.Level;
-import ch.qos.logback.classic.spi.ILoggingEvent;
-import org.hamcrest.CustomTypeSafeMatcher;
-
-import jakarta.annotation.Nonnull;
-import jakarta.annotation.Nullable;
-
-/**
- * A Hamcrest matcher for junit assertions.
- * Matches on level, partial message, and/or a portion of the message contained by a throwable, if present.
- * @deprecated use {@link LogbackEventAssert}
- */
-@Deprecated
-public class LogbackEventMatcher extends CustomTypeSafeMatcher {
- @Nullable
- private final Level myLevel;
- @Nullable
- private final String myLogMessage;
- @Nullable
- private final String myThrownMessage;
-
- public LogbackEventMatcher(@Nullable Level theLevel, @Nullable String thePartialString) {
- this("log event", theLevel, thePartialString, null);
- }
-
- public LogbackEventMatcher(@Nullable Level theLevel, @Nullable String thePartialString, @Nullable String theThrownMessage) {
- this("log event", theLevel, thePartialString, theThrownMessage);
- }
-
- private LogbackEventMatcher(@Nonnull String description, Level theLevel,
- String thePartialString, String theThrownMessage)
- {
- super(makeDescription(description, theLevel, thePartialString, theThrownMessage));
- myLevel = theLevel;
- myLogMessage = thePartialString;
- myThrownMessage = theThrownMessage;
- }
-
- @Nonnull
- private static String makeDescription(String description, Level theLevel, String thePartialString, String theThrownMessage) {
- String msg = description;
- if (theLevel != null) {
- msg = msg + " with level at least " + theLevel;
- }
- if (thePartialString != null) {
- msg = msg + " containing string \"" + thePartialString + "\"";
-
- }
- if (thePartialString != null) {
- msg = msg + " and throwable with error message containing string \"" + theThrownMessage + "\"";
-
- }
- return msg;
- }
-
- @Override
- protected boolean matchesSafely(ILoggingEvent item) {
- return (myLevel == null || item.getLevel().isGreaterOrEqual(myLevel)) &&
- (myLogMessage == null || item.getFormattedMessage().contains(myLogMessage)) &&
- (myThrownMessage == null || item.getThrowableProxy() == null || item.getThrowableProxy().getMessage().contains(myThrownMessage));
- }
-}
diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/StaticLogbackCaptureTestExtension.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/StaticLogbackCaptureTestExtension.java
deleted file mode 100644
index d4e356aaa5a..00000000000
--- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/test/util/StaticLogbackCaptureTestExtension.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*-
- * #%L
- * HAPI FHIR Test Utilities
- * %%
- * Copyright (C) 2014 - 2024 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-package ca.uhn.test.util;
-
-import ch.qos.logback.classic.Level;
-import ch.qos.logback.classic.filter.ThresholdFilter;
-import ch.qos.logback.classic.spi.ILoggingEvent;
-import ch.qos.logback.core.read.ListAppender;
-import jakarta.annotation.Nonnull;
-import org.junit.jupiter.api.extension.AfterAllCallback;
-import org.junit.jupiter.api.extension.BeforeAllCallback;
-import org.junit.jupiter.api.extension.ExtensionContext;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.function.Predicate;
-import java.util.stream.Collectors;
-
-import static org.slf4j.Logger.ROOT_LOGGER_NAME;
-
-/**
- * This is a static wrapper around LogbackTestExtension for use in IT tests when you need to assert on App
- * startup log entries
- * @deprecated use {@link StaticLogbackTestExtension}
- */
-@Deprecated
-public class StaticLogbackCaptureTestExtension implements BeforeAllCallback, AfterAllCallback {
- private final LogbackCaptureTestExtension myLogbackCaptureTestExtension;
-
- public StaticLogbackCaptureTestExtension(LogbackCaptureTestExtension theLogbackCaptureTestExtension) {
- myLogbackCaptureTestExtension = theLogbackCaptureTestExtension;
- }
-
- public StaticLogbackCaptureTestExtension() {
- myLogbackCaptureTestExtension = new LogbackCaptureTestExtension();
- }
-
- public static StaticLogbackCaptureTestExtension withThreshold(Level theLevel) {
- LogbackCaptureTestExtension logbackCaptureTestExtension = new LogbackCaptureTestExtension();
- logbackCaptureTestExtension.setUp(theLevel);
- ThresholdFilter thresholdFilter = new ThresholdFilter();
- thresholdFilter.setLevel(theLevel.levelStr);
- logbackCaptureTestExtension.getAppender().addFilter(thresholdFilter);
-
- return new StaticLogbackCaptureTestExtension(logbackCaptureTestExtension);
- }
-
- @Override
- public void beforeAll(ExtensionContext theExtensionContext) throws Exception {
- if (myLogbackCaptureTestExtension.getAppender() == null) {
- myLogbackCaptureTestExtension.beforeEach(theExtensionContext);
- }
- }
-
- @Override
- public void afterAll(ExtensionContext theExtensionContext) throws Exception {
- myLogbackCaptureTestExtension.afterEach(theExtensionContext);
- }
-
- /**
- * Returns a copy to avoid concurrent modification errors.
- * @return A copy of the log events so far.
- */
- public java.util.List getLogEvents() {
- return myLogbackCaptureTestExtension.getLogEvents();
- }
-
- /** Clear accumulated log events. */
- public void clearEvents() {
- myLogbackCaptureTestExtension.clearEvents();
- }
-
- public ListAppender getAppender() {
- return myLogbackCaptureTestExtension.getAppender();
- }
-
- public List filterLoggingEventsWithMessageEqualTo(String theMessageText){
- return myLogbackCaptureTestExtension.filterLoggingEventsWithMessageEqualTo(theMessageText);
- }
-
- public List filterLoggingEventsWithMessageContaining(String theMessageText){
- return myLogbackCaptureTestExtension.filterLoggingEventsWithMessageContaining(theMessageText);
- }
-
- public List filterLoggingEventsWithPredicate(Predicate theLoggingEventPredicate){
- return myLogbackCaptureTestExtension.filterLoggingEventsWithPredicate(theLoggingEventPredicate);
- }
-
- /**
- * Extract the log messages from the logging events.
- * @return a copy of the List of log messages
- *
- */
- @Nonnull
- public List getLogMessages() {
- return myLogbackCaptureTestExtension.getLogMessages();
- }
-
-}
diff --git a/hapi-fhir-validation/src/test/java/ca/uhn/fhir/rest/server/RequestValidatingInterceptorDstu3Test.java b/hapi-fhir-validation/src/test/java/ca/uhn/fhir/rest/server/RequestValidatingInterceptorDstu3Test.java
index b679785f8ef..2d30fbe268e 100644
--- a/hapi-fhir-validation/src/test/java/ca/uhn/fhir/rest/server/RequestValidatingInterceptorDstu3Test.java
+++ b/hapi-fhir-validation/src/test/java/ca/uhn/fhir/rest/server/RequestValidatingInterceptorDstu3Test.java
@@ -31,7 +31,6 @@ import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
-import org.hamcrest.Matchers;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.dstu3.model.Enumerations.AdministrativeGender;
import org.hl7.fhir.dstu3.model.IdType;
diff --git a/hapi-fhir-validation/src/test/java/ca/uhn/fhir/rest/server/ResponseValidatingInterceptorDstu3Test.java b/hapi-fhir-validation/src/test/java/ca/uhn/fhir/rest/server/ResponseValidatingInterceptorDstu3Test.java
index f576d8e3af6..2f0199b684a 100644
--- a/hapi-fhir-validation/src/test/java/ca/uhn/fhir/rest/server/ResponseValidatingInterceptorDstu3Test.java
+++ b/hapi-fhir-validation/src/test/java/ca/uhn/fhir/rest/server/ResponseValidatingInterceptorDstu3Test.java
@@ -24,7 +24,6 @@ import org.apache.http.HttpResponse;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
-import org.hamcrest.Matchers;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.dstu3.model.Enumerations.AdministrativeGender;
import org.hl7.fhir.dstu3.model.IdType;
diff --git a/hapi-fhir-validation/src/test/java/org/hl7/fhir/dstu2016may/hapi/validation/ResourceValidatorDstu2_1Test.java b/hapi-fhir-validation/src/test/java/org/hl7/fhir/dstu2016may/hapi/validation/ResourceValidatorDstu2_1Test.java
index c94f98102f2..1fbb525077e 100644
--- a/hapi-fhir-validation/src/test/java/org/hl7/fhir/dstu2016may/hapi/validation/ResourceValidatorDstu2_1Test.java
+++ b/hapi-fhir-validation/src/test/java/org/hl7/fhir/dstu2016may/hapi/validation/ResourceValidatorDstu2_1Test.java
@@ -12,7 +12,6 @@ import ca.uhn.fhir.validation.SchemaBaseValidator;
import ca.uhn.fhir.validation.ValidationResult;
import ca.uhn.fhir.validation.schematron.SchematronBaseValidator;
import org.apache.commons.io.IOUtils;
-import org.hamcrest.core.StringContains;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.dstu2016may.model.CodeableConcept;
import org.hl7.fhir.dstu2016may.model.Coding;
diff --git a/hapi-fhir-validation/src/test/java/org/hl7/fhir/dstu3/hapi/validation/ResourceValidatorDstu3Test.java b/hapi-fhir-validation/src/test/java/org/hl7/fhir/dstu3/hapi/validation/ResourceValidatorDstu3Test.java
index a2bc7cc9869..6464cd18b05 100644
--- a/hapi-fhir-validation/src/test/java/org/hl7/fhir/dstu3/hapi/validation/ResourceValidatorDstu3Test.java
+++ b/hapi-fhir-validation/src/test/java/org/hl7/fhir/dstu3/hapi/validation/ResourceValidatorDstu3Test.java
@@ -18,7 +18,6 @@ import ca.uhn.fhir.validation.ValidationResult;
import ca.uhn.fhir.validation.schematron.SchematronBaseValidator;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.Validate;
-import org.hamcrest.core.StringContains;
import org.hl7.fhir.common.hapi.validation.support.PrePopulatedValidationSupport;
import org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
diff --git a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu2/Dstu2ResourceValidatorDstu2Test.java b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu2/Dstu2ResourceValidatorDstu2Test.java
index c97af2c13a5..0c099c90deb 100644
--- a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu2/Dstu2ResourceValidatorDstu2Test.java
+++ b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu2/Dstu2ResourceValidatorDstu2Test.java
@@ -33,9 +33,6 @@ import ca.uhn.fhir.validation.ValidationFailureException;
import ca.uhn.fhir.validation.ValidationResult;
import ca.uhn.fhir.validation.schematron.SchematronBaseValidator;
import org.apache.commons.io.IOUtils;
-import org.hamcrest.Matcher;
-import org.hamcrest.Matchers;
-import org.hamcrest.core.StringContains;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.ops4j.pax.exam.Configuration;
@@ -47,8 +44,6 @@ import org.ops4j.pax.exam.spi.reactors.PerClass;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.HAPI_FHIR_VALIDATION_DSTU2;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.KARAF;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.WRAP;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
diff --git a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu21/JsonParserDstu2_1Test.java b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu21/JsonParserDstu2_1Test.java
index d913746f619..782683dd791 100644
--- a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu21/JsonParserDstu2_1Test.java
+++ b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu21/JsonParserDstu2_1Test.java
@@ -1,5 +1,6 @@
package ca.uhn.fhir.tests.integration.karaf.dstu21;
+import static org.assertj.core.api.Assertions.assertThat;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.ArrayList;
@@ -13,8 +14,6 @@ import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.parser.LenientErrorHandler;
import ca.uhn.fhir.parser.StrictErrorHandler;
import com.google.common.collect.Sets;
-import org.hamcrest.Matcher;
-import org.hamcrest.Matchers;
import org.hl7.fhir.dstu2016may.model.Conformance;
import org.hl7.fhir.dstu2016may.model.PrimitiveType;
import org.hl7.fhir.instance.model.api.IIdType;
@@ -30,10 +29,6 @@ import org.ops4j.pax.exam.spi.reactors.PerClass;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.HAPI_FHIR_DSTU2_1;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.KARAF;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.WRAP;
-import static org.hamcrest.Matchers.contains;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.empty;
-import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
@@ -668,7 +663,7 @@ public class JsonParserDstu2_1Test {
assertNull(ourCtx.newJsonParser().getStripVersionsFromReferences());
assertTrue(ourCtx.getParserOptions().isStripVersionsFromReferences());
- assertThat(ourCtx.getParserOptions().getDontStripVersionsFromReferencesAtPaths(), empty());
+ assertThat(ourCtx.getParserOptions().getDontStripVersionsFromReferencesAtPaths()).isEmpty();
org.hl7.fhir.dstu2016may.model.Patient p = new org.hl7.fhir.dstu2016may.model.Patient();
p.setManagingOrganization(new org.hl7.fhir.dstu2016may.model.Reference("http://foo.com/Organization/2/_history/1"));
@@ -1033,7 +1028,7 @@ public class JsonParserDstu2_1Test {
assertThat(ourCtx.newJsonParser().encodeResourceToString(p)).containsSubsequence("123", "ABC");
assertThat(ourCtx.newJsonParser().setOmitResourceId(true).encodeResourceToString(p)).contains("ABC");
- assertThat(ourCtx.newJsonParser().setOmitResourceId(true).encodeResourceToString(p), not(containsString("123")));
+ assertThat(ourCtx.newJsonParser().setOmitResourceId(true).encodeResourceToString(p)).doesNotContain("123");
}
@Test
@@ -1157,8 +1152,8 @@ public class JsonParserDstu2_1Test {
assertEquals("654321", res.getIdentifier().get(0).getValue());
assertEquals(true, res.getActive());
- assertThat(res.getIdentifier().get(0).getFormatCommentsPre(), contains("identifier comment 1", "identifier comment 2"));
- assertThat(res.getIdentifier().get(0).getUseElement().getFormatCommentsPre(), contains("use comment 1", "use comment 2"));
+ assertThat(res.getIdentifier().get(0).getFormatCommentsPre()).contains("identifier comment 1", "identifier comment 2");
+ assertThat(res.getIdentifier().get(0).getUseElement().getFormatCommentsPre()).contains("use comment 1", "use comment 2");
String encoded = ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(res);
ourLog.info(encoded);
diff --git a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu21/XmlParserDstu2_1Test.java b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu21/XmlParserDstu2_1Test.java
index 077ac70d900..23d84658dc3 100644
--- a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu21/XmlParserDstu2_1Test.java
+++ b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu21/XmlParserDstu2_1Test.java
@@ -1,5 +1,6 @@
package ca.uhn.fhir.tests.integration.karaf.dstu21;
+import static org.assertj.core.api.Assertions.assertThat;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
@@ -15,10 +16,6 @@ import ca.uhn.fhir.parser.LenientErrorHandler;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.Constants;
import com.google.common.collect.Sets;
-import org.hamcrest.Matcher;
-import org.hamcrest.Matchers;
-import org.hamcrest.core.StringContains;
-import org.hamcrest.text.StringContainsInOrder;
import org.hl7.fhir.dstu2016may.model.Address;
import org.hl7.fhir.dstu2016may.model.Appointment;
import org.hl7.fhir.dstu2016may.model.AuditEvent;
@@ -75,10 +72,6 @@ import org.xmlunit.diff.ElementSelectors;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.HAPI_FHIR_DSTU2_1;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.KARAF;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.WRAP;
-import static org.hamcrest.Matchers.contains;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.empty;
-import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
@@ -723,7 +716,7 @@ public class XmlParserDstu2_1Test {
//@formatter:on
Patient parsed = ourCtx.newXmlParser().parseResource(Patient.class, enc);
- assertThat(parsed.getMeta().getProfile(), empty());
+ assertThat(parsed.getMeta().getProfile()).isEmpty();
List tagList = parsed.getMeta().getTag();
assertEquals(2, tagList.size());
@@ -1673,7 +1666,7 @@ public class XmlParserDstu2_1Test {
assertThat(ourCtx.newXmlParser().encodeResourceToString(p)).containsSubsequence("123", "ABC");
assertThat(ourCtx.newXmlParser().setOmitResourceId(true).encodeResourceToString(p)).contains("ABC");
- assertThat(ourCtx.newXmlParser().setOmitResourceId(true).encodeResourceToString(p), not(containsString("123")));
+ assertThat(ourCtx.newXmlParser().setOmitResourceId(true).encodeResourceToString(p)).doesNotContain("123");
}
@@ -1717,10 +1710,10 @@ public class XmlParserDstu2_1Test {
assertEquals("654321", res.getIdentifier().get(0).getValue());
assertEquals(true, res.getActive());
- assertThat(res.getIdElement().getFormatCommentsPre(), contains("pre resource comment"));
- assertThat(res.getIdentifier().get(0).getFormatCommentsPre(), contains("identifier comment 1", "identifier comment 2"));
- assertThat(res.getIdentifier().get(0).getUseElement().getFormatCommentsPre(), contains("use comment 1", "use comment 2"));
- assertThat(res.getActiveElement().getFormatCommentsPost(), contains("post resource comment"));
+ assertThat(res.getIdElement().getFormatCommentsPre()).contains("pre resource comment");
+ assertThat(res.getIdentifier().get(0).getFormatCommentsPre()).contains("identifier comment 1", "identifier comment 2");
+ assertThat(res.getIdentifier().get(0).getUseElement().getFormatCommentsPre()).contains("use comment 1", "use comment 2");
+ assertThat(res.getActiveElement().getFormatCommentsPost()).contains("post resource comment");
String encoded = ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(res);
ourLog.info(encoded);
diff --git a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu2hl7org/JsonParserHl7OrgDstu2Test.java b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu2hl7org/JsonParserHl7OrgDstu2Test.java
index f22aae86602..974847ea5e3 100644
--- a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu2hl7org/JsonParserHl7OrgDstu2Test.java
+++ b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu2hl7org/JsonParserHl7OrgDstu2Test.java
@@ -12,11 +12,6 @@ import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.Constants;
import org.apache.commons.io.IOUtils;
-import org.hamcrest.Matcher;
-import org.hamcrest.Matchers;
-import org.hamcrest.core.IsNot;
-import org.hamcrest.core.StringContains;
-import org.hamcrest.text.StringContainsInOrder;
import org.hl7.fhir.dstu2.model.Address;
import org.hl7.fhir.dstu2.model.Address.AddressUse;
import org.hl7.fhir.dstu2.model.Address.AddressUseEnumFactory;
@@ -65,8 +60,6 @@ import org.xml.sax.SAXException;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.HAPI_FHIR_HL7ORG_DSTU2;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.KARAF;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.WRAP;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
diff --git a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu2hl7org/XmlParserHl7OrgDstu2Test.java b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu2hl7org/XmlParserHl7OrgDstu2Test.java
index 0f72daefd67..78e9c1fb10b 100644
--- a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu2hl7org/XmlParserHl7OrgDstu2Test.java
+++ b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu2hl7org/XmlParserHl7OrgDstu2Test.java
@@ -17,11 +17,6 @@ import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.Constants;
import org.apache.commons.io.IOUtils;
-import org.hamcrest.Matcher;
-import org.hamcrest.Matchers;
-import org.hamcrest.core.IsNot;
-import org.hamcrest.core.StringContains;
-import org.hamcrest.text.StringContainsInOrder;
import org.hl7.fhir.dstu2.model.Address;
import org.hl7.fhir.dstu2.model.CodeableConcept;
import org.hl7.fhir.dstu2.model.Composition;
@@ -58,8 +53,6 @@ import org.xmlunit.diff.ElementSelectors;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.HAPI_FHIR_HL7ORG_DSTU2;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.KARAF;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.WRAP;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
diff --git a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu3/Dstu3JsonParserTest.java b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu3/Dstu3JsonParserTest.java
index f5ea68fe087..fd62c0da55e 100644
--- a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu3/Dstu3JsonParserTest.java
+++ b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu3/Dstu3JsonParserTest.java
@@ -1,5 +1,6 @@
package ca.uhn.fhir.tests.integration.karaf.dstu3;
+import static org.assertj.core.api.Assertions.assertThat;
import java.io.IOException;
import java.io.StringReader;
import java.math.BigDecimal;
@@ -14,8 +15,6 @@ import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.parser.LenientErrorHandler;
import ca.uhn.fhir.parser.StrictErrorHandler;
import com.google.common.collect.Sets;
-import org.hamcrest.Matcher;
-import org.hamcrest.Matchers;
import org.hl7.fhir.dstu3.model.AuditEvent;
import org.hl7.fhir.dstu3.model.Basic;
import org.hl7.fhir.dstu3.model.Binary;
@@ -62,10 +61,6 @@ import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.HAPI_FHIR_DSTU3
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.KARAF;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.WRAP;
import static org.apache.commons.lang3.StringUtils.countMatches;
-import static org.hamcrest.Matchers.contains;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.empty;
-import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
@@ -871,7 +866,7 @@ public class Dstu3JsonParserTest {
assertNull(ourCtx.newJsonParser().getStripVersionsFromReferences());
assertTrue(ourCtx.getParserOptions().isStripVersionsFromReferences());
- assertThat(ourCtx.getParserOptions().getDontStripVersionsFromReferencesAtPaths(), empty());
+ assertThat(ourCtx.getParserOptions().getDontStripVersionsFromReferencesAtPaths()).isEmpty();
Patient p = new Patient();
p.setManagingOrganization(new Reference("http://foo.com/Organization/2/_history/1"));
@@ -1264,7 +1259,7 @@ public class Dstu3JsonParserTest {
assertThat(ourCtx.newJsonParser().encodeResourceToString(p)).containsSubsequence("123", "ABC");
assertThat(ourCtx.newJsonParser().setOmitResourceId(true).encodeResourceToString(p)).contains("ABC");
- assertThat(ourCtx.newJsonParser().setOmitResourceId(true).encodeResourceToString(p), not(containsString("123")));
+ assertThat(ourCtx.newJsonParser().setOmitResourceId(true).encodeResourceToString(p)).doesNotContain("123");
}
@Test
@@ -1430,8 +1425,8 @@ public class Dstu3JsonParserTest {
assertEquals("654321", res.getIdentifier().get(0).getValue());
assertEquals(true, res.getActive());
- assertThat(res.getIdentifier().get(0).getFormatCommentsPre(), contains("identifier comment 1", "identifier comment 2"));
- assertThat(res.getIdentifier().get(0).getUseElement().getFormatCommentsPre(), contains("use comment 1", "use comment 2"));
+ assertThat(res.getIdentifier().get(0).getFormatCommentsPre()).contains("identifier comment 1", "identifier comment 2");
+ assertThat(res.getIdentifier().get(0).getUseElement().getFormatCommentsPre()).contains("use comment 1", "use comment 2");
String encoded = ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(res);
ourLog.info(encoded);
diff --git a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu3/Dstu3XmlParserTest.java b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu3/Dstu3XmlParserTest.java
index 8a83f568f7c..6e968755a98 100644
--- a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu3/Dstu3XmlParserTest.java
+++ b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu3/Dstu3XmlParserTest.java
@@ -1,5 +1,6 @@
package ca.uhn.fhir.tests.integration.karaf.dstu3;
+import static org.assertj.core.api.Assertions.assertThat;
import java.io.IOException;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
@@ -20,10 +21,6 @@ import com.google.common.collect.Sets;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
-import org.hamcrest.Matcher;
-import org.hamcrest.Matchers;
-import org.hamcrest.core.StringContains;
-import org.hamcrest.text.StringContainsInOrder;
import org.hl7.fhir.dstu3.model.Address;
import org.hl7.fhir.dstu3.model.AllergyIntolerance;
import org.hl7.fhir.dstu3.model.Annotation;
@@ -94,11 +91,6 @@ import org.xmlunit.diff.ElementSelectors;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.HAPI_FHIR_DSTU3;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.KARAF;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.WRAP;
-import static org.hamcrest.Matchers.contains;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.empty;
-import static org.hamcrest.Matchers.not;
-import static org.hamcrest.Matchers.startsWith;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
@@ -831,7 +823,7 @@ public class Dstu3XmlParserTest {
""));
Patient parsed = ourCtx.newXmlParser().parseResource(Patient.class, enc);
- assertThat(parsed.getMeta().getProfile(), empty());
+ assertThat(parsed.getMeta().getProfile()).isEmpty();
List tagList = parsed.getMeta().getTag();
assertEquals(2, tagList.size());
@@ -2085,7 +2077,7 @@ public class Dstu3XmlParserTest {
assertThat(ourCtx.newXmlParser().encodeResourceToString(p)).containsSubsequence("123", "ABC");
assertThat(ourCtx.newXmlParser().setOmitResourceId(true).encodeResourceToString(p)).contains("ABC");
- assertThat(ourCtx.newXmlParser().setOmitResourceId(true).encodeResourceToString(p), not(containsString("123")));
+ assertThat(ourCtx.newXmlParser().setOmitResourceId(true).encodeResourceToString(p)).doesNotContain("123");
}
@Test
@@ -2167,10 +2159,10 @@ public class Dstu3XmlParserTest {
assertEquals("654321", res.getIdentifier().get(0).getValue());
assertEquals(true, res.getActive());
- assertThat(res.getIdElement().getFormatCommentsPre(), contains("pre resource comment"));
- assertThat(res.getIdentifier().get(0).getFormatCommentsPre(), contains("identifier comment 1", "identifier comment 2"));
- assertThat(res.getIdentifier().get(0).getUseElement().getFormatCommentsPre(), contains("use comment 1", "use comment 2"));
- assertThat(res.getActiveElement().getFormatCommentsPost(), contains("post resource comment"));
+ assertThat(res.getIdElement().getFormatCommentsPre()).contains("pre resource comment");
+ assertThat(res.getIdentifier().get(0).getFormatCommentsPre()).contains("identifier comment 1", "identifier comment 2");
+ assertThat(res.getIdentifier().get(0).getUseElement().getFormatCommentsPre()).contains("use comment 1", "use comment 2");
+ assertThat(res.getActiveElement().getFormatCommentsPost()).contains("post resource comment");
String encoded = ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(res);
ourLog.info(encoded);
diff --git a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu3/ResourceValidatorDstu3FeatureTest.java b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu3/ResourceValidatorDstu3FeatureTest.java
index 532a84e14ea..edf76eac0c4 100644
--- a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu3/ResourceValidatorDstu3FeatureTest.java
+++ b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/dstu3/ResourceValidatorDstu3FeatureTest.java
@@ -8,7 +8,6 @@ import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.tests.integration.karaf.ValidationConstants;
import ca.uhn.fhir.validation.schematron.SchematronBaseValidator;
-import org.hamcrest.core.StringContains;
import org.hl7.fhir.r5.hapi.validation.FhirInstanceValidator;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
diff --git a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/r4/R4JsonParserTest.java b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/r4/R4JsonParserTest.java
index e02111b439e..ff601ad1fb2 100644
--- a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/r4/R4JsonParserTest.java
+++ b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/r4/R4JsonParserTest.java
@@ -19,8 +19,6 @@ import org.ops4j.pax.exam.spi.reactors.PerClass;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.HAPI_FHIR_R4;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.KARAF;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.WRAP;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.core.IsNot.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.mockito.ArgumentMatchers.contains;
diff --git a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/r4/R4XmlParserTest.java b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/r4/R4XmlParserTest.java
index d9f893fb0f6..d1a0f53ad75 100644
--- a/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/r4/R4XmlParserTest.java
+++ b/osgi/hapi-fhir-karaf-integration-tests/src/test/java/ca/uhn/fhir/tests/integration/karaf/r4/R4XmlParserTest.java
@@ -6,7 +6,6 @@ import java.util.Set;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.IParser;
import com.google.common.collect.Sets;
-import org.hamcrest.core.IsNot;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.Test;
@@ -20,7 +19,6 @@ import org.ops4j.pax.exam.spi.reactors.PerClass;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.HAPI_FHIR_R4;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.KARAF;
import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.WRAP;
-import static org.hamcrest.Matchers.containsString;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.ops4j.pax.exam.CoreOptions.mavenBundle;
From 472984ac65cbf3e9076652cd9dabccf66e1655a9 Mon Sep 17 00:00:00 2001
From: Martha Mitran
Date: Tue, 18 Jun 2024 14:57:04 -0700
Subject: [PATCH 04/19] Document pointcut to use to modify request parameters
(#6019)
* Document pointcut to use to modify request parameters. Add a test to exemplify.
* Add another alternate pointcut to use
---
.../ca/uhn/fhir/interceptor/api/Pointcut.java | 17 ++++----
.../provider/r4/ResourceProviderR4Test.java | 43 +++++++++++++++++--
2 files changed, 49 insertions(+), 11 deletions(-)
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
index 016512e537d..cac6a1725a6 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
@@ -321,9 +321,12 @@ public enum Pointcut implements IPointcut {
* This hook is invoked before an incoming request is processed. Note that this method is called
* after the server has begun preparing the response to the incoming client request.
* As such, it is not able to supply a response to the incoming request in the way that
- * SERVER_INCOMING_REQUEST_PRE_PROCESSED and
- * {@link #SERVER_INCOMING_REQUEST_POST_PROCESSED}
- * are.
+ * SERVER_INCOMING_REQUEST_PRE_PROCESSED and {@link #SERVER_INCOMING_REQUEST_POST_PROCESSED} are.
+ * At this point the request has already been passed to the handler so any changes
+ * (e.g. adding parameters) will not be considered.
+ * If you'd like to modify request parameters before they are passed to the handler,
+ * use {@link Pointcut#SERVER_INCOMING_REQUEST_PRE_HANDLER_SELECTED} or {@link Pointcut#SERVER_INCOMING_REQUEST_POST_PROCESSED}.
+ * If you are attempting to modify a search before it occurs, use {@link Pointcut#STORAGE_PRESEARCH_REGISTERED}.
*
* Hooks may accept the following parameters:
*
@@ -902,7 +905,6 @@ public enum Pointcut implements IPointcut {
* canonical subscription such as adding headers, modifying the channel
* endpoint, etc.
* Furthermore, you may modify the outgoing message wrapper, for example adding headers via ResourceModifiedJsonMessage field.
- *
*
* Hooks may accept the following parameters:
*
@@ -1122,7 +1124,6 @@ public enum Pointcut implements IPointcut {
* Storage Hook:
* Invoked when a Bulk Export job is being kicked off. Hook methods may modify
* the request, or raise an exception to prevent it from being initiated.
- *
* This hook is not guaranteed to be called before permission checks, and so
any implementers should be cautious of changing the options in ways that would
* affect permissions.
@@ -1192,7 +1193,7 @@ public enum Pointcut implements IPointcut {
/**
* Storage Hook:
- * Invoked when a set of resources are about to be deleted and expunged via url like http://localhost/Patient?active=false&_expunge=true
+ * Invoked when a set of resources are about to be deleted and expunged via url like {@code http://localhost/Patient?active=false&_expunge=true}.
*
* Hooks may accept the following parameters:
*
@@ -1228,7 +1229,7 @@ public enum Pointcut implements IPointcut {
/**
* Storage Hook:
- * Invoked when a batch of resource pids are about to be deleted and expunged via url like http://localhost/Patient?active=false&_expunge=true
+ * Invoked when a batch of resource pids are about to be deleted and expunged via url like {@code http://localhost/Patient?active=false&_expunge=true}.
*
* Hooks may accept the following parameters:
*
@@ -2957,7 +2958,6 @@ public enum Pointcut implements IPointcut {
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
"ca.uhn.fhir.jpa.util.SqlQueryList"),
- @Deprecated(since = "7.2.0 - Use STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX instead.")
/**
* Deprecated but still supported. Will eventually be removed. Please use Pointcut.STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX
* Binary Blob Prefix Assigning Hook:
@@ -2980,6 +2980,7 @@ public enum Pointcut implements IPointcut {
* Hooks should return String, which represents the full prefix to be applied to the blob.
*
*/
+ @Deprecated(since = "7.2.0 - Use STORAGE_BINARY_ASSIGN_BINARY_CONTENT_ID_PREFIX instead.")
STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX(
String.class,
"ca.uhn.fhir.rest.api.server.RequestDetails",
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
index f33bbcd2c28..14fb2bb0d7e 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
@@ -3,6 +3,8 @@ package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.i18n.HapiLocalizer;
import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.interceptor.api.Hook;
+import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
@@ -44,12 +46,16 @@ import ca.uhn.fhir.rest.gclient.NumberClientParam;
import ca.uhn.fhir.rest.gclient.StringClientParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
+import ca.uhn.fhir.rest.param.TokenParam;
+import ca.uhn.fhir.rest.param.TokenParamModifier;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
+import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.rest.server.util.ICachedSearchDetails;
import ca.uhn.fhir.util.ClasspathUtil;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.TestUtil;
@@ -198,15 +204,12 @@ import static ca.uhn.fhir.util.TestUtil.sleepAtLeast;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy;
-import static org.junit.jupiter.api.Assertions.fail;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
-import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
-
import static org.mockito.Mockito.when;
@SuppressWarnings("Duplicates")
@@ -5324,6 +5327,40 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
}
+ @Test
+ public void testSearchWithParameterAddedInInterceptor() {
+ Object interceptor = new Object() {
+ @Hook(Pointcut.STORAGE_PRESEARCH_REGISTERED)
+ public void storagePreSearchRegistered(
+ ICachedSearchDetails theCachedSearchDetails,
+ RequestDetails theRequestDetails,
+ ServletRequestDetails theServletRequestDetails,
+ SearchParameterMap theSearchParameterMap) {
+ theSearchParameterMap.add("_security", new TokenParam("http://system", "security1").setModifier(TokenParamModifier.NOT));
+ }
+ };
+ myInterceptorRegistry.registerInterceptor(interceptor);
+
+ try {
+ final Patient patient1 = new Patient().setActive(true);
+ patient1.getMeta().addSecurity("http://system", "security1", "Tag 1");
+ MethodOutcome outcome1 = myPatientDao.create(patient1, mySrd);
+ assertTrue(outcome1.getCreated());
+
+ final Patient patient2 = new Patient().setActive(true);
+ patient2.getMeta().addSecurity("http://system", "security2", "Tag 2");
+ MethodOutcome outcome2 = myPatientDao.create(patient2, mySrd);
+ assertTrue(outcome2.getCreated());
+ String idForPatient2 = outcome2.getId().toUnqualifiedVersionless().getValue();
+
+ IBaseBundle bundle = myClient.search().forResource("Patient").execute();
+ List ids = toUnqualifiedVersionlessIdValues(bundle);
+ assertThat(ids).containsExactly(idForPatient2);
+ } finally {
+ myInterceptorRegistry.unregisterInterceptor(interceptor);
+ }
+ }
+
@Test
public void testSelfReferentialInclude() {
Location loc1 = new Location();
From 60f456c65572f1a47551bf533bdedc1a69807f3a Mon Sep 17 00:00:00 2001
From: TipzCM
Date: Wed, 19 Jun 2024 17:00:37 -0400
Subject: [PATCH 05/19] mdm multidelete golden resource final resource pair
throws (#6001)
* fixed a bug with mdm and multidelete
---
.../ca/uhn/fhir/interceptor/api/Pointcut.java | 8 +-
...final-source-resource-together-throws.yaml | 8 ++
.../fhir/jpa/dao/BaseHapiFhirResourceDao.java | 7 +-
.../uhn/fhir/jpa/mdm/helper/MdmHelperR4.java | 3 +-
.../interceptor/MdmStorageInterceptorIT.java | 72 +++++++++++++++
.../interceptor/MdmStorageInterceptor.java | 88 ++++++++++++++-----
6 files changed, 158 insertions(+), 28 deletions(-)
create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6000-mdm-multidelete-golden-and-final-source-resource-together-throws.yaml
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
index cac6a1725a6..fd23e3160fb 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
@@ -1576,11 +1576,11 @@ public enum Pointcut implements IPointcut {
/**
* Storage Hook:
- * Invoked before a resource will be created, immediately before the resource
- * is persisted to the database.
+ * Invoked before a resource will be deleted, immediately before the resource
+ * is removed from the database.
*
- * Hooks will have access to the contents of the resource being created
- * and may choose to make modifications to it. These changes will be
+ * Hooks will have access to the contents of the resource being deleted
+ * and may choose to make modifications related to it. These changes will be
* reflected in permanent storage.
*
* Hooks may accept the following parameters:
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6000-mdm-multidelete-golden-and-final-source-resource-together-throws.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6000-mdm-multidelete-golden-and-final-source-resource-together-throws.yaml
new file mode 100644
index 00000000000..6dc27a0aae2
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6000-mdm-multidelete-golden-and-final-source-resource-together-throws.yaml
@@ -0,0 +1,8 @@
+---
+type: fix
+issue: 6000
+title: "In an MDM enabled system with multi-delete enabled, deleting
+ both the final source resource and its linked golden resource
+ at the same time results in an error being thrown.
+ This has been fixed.
+"
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
index e6e80b214ea..38100cc0519 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
@@ -908,7 +908,8 @@ public abstract class BaseHapiFhirResourceDao extends B
RequestDetails theRequestDetails,
TransactionDetails theTransactionDetails) {
StopWatch w = new StopWatch();
- TransactionDetails transactionDetails = new TransactionDetails();
+ TransactionDetails transactionDetails =
+ theTransactionDetails != null ? theTransactionDetails : new TransactionDetails();
List deletedResources = new ArrayList<>();
List> resolvedIds =
@@ -924,6 +925,8 @@ public abstract class BaseHapiFhirResourceDao extends B
T resourceToDelete = myJpaStorageResourceParser.toResource(myResourceType, entity, null, false);
+ transactionDetails.addDeletedResourceId(pid);
+
// Notify IServerOperationInterceptors about pre-action call
HookParams hooks = new HookParams()
.add(IBaseResource.class, resourceToDelete)
@@ -988,8 +991,6 @@ public abstract class BaseHapiFhirResourceDao extends B
deletedResources.size(),
w.getMillis());
- theTransactionDetails.addDeletedResourceIds(theResourceIds);
-
DeleteMethodOutcome retVal = new DeleteMethodOutcome();
retVal.setDeletedEntities(deletedResources);
retVal.setOperationOutcome(oo);
diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/helper/MdmHelperR4.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/helper/MdmHelperR4.java
index ec9bab6755b..d6d397988d5 100644
--- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/helper/MdmHelperR4.java
+++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/helper/MdmHelperR4.java
@@ -6,6 +6,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.mdm.model.mdmevents.MdmLinkEvent;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.TransactionLogMessages;
import ca.uhn.fhir.rest.server.messaging.ResourceOperationMessage;
import ca.uhn.test.concurrency.PointcutLatch;
@@ -51,7 +52,7 @@ public class MdmHelperR4 extends BaseMdmHelper {
String resourceType = myFhirContext.getResourceType(theResource);
IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceType);
- return isExternalHttpRequest ? dao.create(theResource, myMockSrd): dao.create(theResource);
+ return isExternalHttpRequest ? dao.create(theResource, myMockSrd): dao.create(theResource, new SystemRequestDetails());
}
public DaoMethodOutcome doUpdateResource(IBaseResource theResource, boolean isExternalHttpRequest) {
diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmStorageInterceptorIT.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmStorageInterceptorIT.java
index 98064590372..3224104cf35 100644
--- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmStorageInterceptorIT.java
+++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmStorageInterceptorIT.java
@@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.mdm.interceptor;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
+import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.entity.MdmLink;
import ca.uhn.fhir.jpa.mdm.BaseMdmR4Test;
@@ -16,6 +17,7 @@ import ca.uhn.fhir.mdm.model.CanonicalEID;
import ca.uhn.fhir.mdm.model.MdmCreateOrUpdateParams;
import ca.uhn.fhir.mdm.model.MdmTransactionContext;
import ca.uhn.fhir.mdm.rules.config.MdmSettings;
+import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
@@ -27,6 +29,8 @@ import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.ContactPoint;
+import org.hl7.fhir.r4.model.DateType;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.Medication;
import org.hl7.fhir.r4.model.Organization;
@@ -34,11 +38,14 @@ import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.SearchParameter;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Example;
import org.springframework.test.context.ContextConfiguration;
+import java.util.Collections;
import java.util.Date;
import java.util.List;
@@ -50,6 +57,7 @@ import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.slf4j.LoggerFactory.getLogger;
@@ -101,6 +109,70 @@ public class MdmStorageInterceptorIT extends BaseMdmR4Test {
assertLinkCount(0);
}
+ @ParameterizedTest
+ @ValueSource(booleans = { true, false })
+ public void deleteResourcesByUrl_withMultipleDeleteCatchingSourceAndGoldenResource_deletesWithoutThrowing(boolean theIncludeOtherResources) throws InterruptedException {
+ // setup
+ boolean allowMultipleDelete = myStorageSettings.isAllowMultipleDelete();
+ myStorageSettings.setAllowMultipleDelete(true);
+
+ int linkCount = 0;
+ int resourceCount = 0;
+ myMdmHelper.createWithLatch(buildJanePatient());
+ resourceCount += 2; // patient + golden
+ linkCount++;
+
+ // add some other resources to make it more complex
+ if (theIncludeOtherResources) {
+ Date birthday = new Date();
+ Patient patient = new Patient();
+ patient.getNameFirstRep().addGiven("yui");
+ patient.setBirthDate(birthday);
+ patient.setTelecom(Collections.singletonList(new ContactPoint()
+ .setSystem(ContactPoint.ContactPointSystem.PHONE)
+ .setValue("555-567-5555")));
+ DateType dateType = new DateType(birthday);
+ patient.addIdentifier().setSystem(TEST_ID_SYSTEM).setValue("ID.YUI.123");
+ dateType.setPrecision(TemporalPrecisionEnum.DAY);
+ patient.setBirthDateElement(dateType);
+ patient.setActive(true);
+ for (int i = 0; i < 2; i++) {
+ String familyName = i == 0 ? "hirasawa" : "kotegawa";
+ patient.getNameFirstRep().setFamily(familyName);
+ myMdmHelper.createWithLatch(patient);
+ resourceCount++;
+ linkCount++; // every resource creation creates 1 link
+ }
+ resourceCount++; // for the Golden Resource
+
+ // verify we have at least this many resources
+ SearchParameterMap map = new SearchParameterMap();
+ map.setLoadSynchronous(true);
+ IBundleProvider provider = myPatientDao.search(map, new SystemRequestDetails());
+ assertEquals(resourceCount, provider.size());
+
+ // verify we have the links
+ assertEquals(linkCount, myMdmLinkDao.count());
+ }
+
+ try {
+ // test
+ // filter will delete everything
+ DeleteMethodOutcome outcome = myPatientDao.deleteByUrl("Patient?_lastUpdated=ge2024-01-01", new SystemRequestDetails());
+
+ // validation
+ assertNotNull(outcome);
+ List links = myMdmLinkDao.findAll();
+ assertTrue(links.isEmpty());
+ SearchParameterMap map = new SearchParameterMap();
+ map.setLoadSynchronous(true);
+ IBundleProvider provider = myPatientDao.search(map, new SystemRequestDetails());
+ assertTrue(provider.getAllResources().isEmpty());
+ } finally {
+ myStorageSettings.setAllowMultipleDelete(allowMultipleDelete);
+ }
+ }
+
@Test
public void testGoldenResourceDeleted_whenOnlyMatchedResourceDeleted() throws InterruptedException {
// Given
diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmStorageInterceptor.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmStorageInterceptor.java
index c7819736162..1851e12d7b7 100644
--- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmStorageInterceptor.java
+++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmStorageInterceptor.java
@@ -61,8 +61,10 @@ import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.Collections;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
@@ -70,15 +72,21 @@ import static ca.uhn.fhir.mdm.api.MdmMatchResultEnum.MATCH;
import static ca.uhn.fhir.mdm.api.MdmMatchResultEnum.NO_MATCH;
import static ca.uhn.fhir.mdm.api.MdmMatchResultEnum.POSSIBLE_MATCH;
+@SuppressWarnings("rawtypes")
@Service
public class MdmStorageInterceptor implements IMdmStorageInterceptor {
+ private static final String GOLDEN_RESOURCES_TO_DELETE = "GR_TO_DELETE";
+
private static final Logger ourLog = LoggerFactory.getLogger(MdmStorageInterceptor.class);
// Used to bypass trying to remove mdm links associated to a resource when running mdm-clear batch job, which
// deletes all links beforehand, and impacts performance for no action
private static final ThreadLocal ourLinksDeletedBeforehand = ThreadLocal.withInitial(() -> Boolean.FALSE);
+ @Autowired
+ private IMdmClearHelperSvc extends IResourcePersistentId>> myIMdmClearHelperSvc;
+
@Autowired
private IExpungeEverythingService myExpungeEverythingService;
@@ -126,7 +134,7 @@ public class MdmStorageInterceptor implements IMdmStorageInterceptor {
// If running in single EID mode, forbid multiple eids.
if (myMdmSettings.isPreventMultipleEids()) {
- ourLog.debug("Forbidding multiple EIDs on ", theBaseResource);
+ ourLog.debug("Forbidding multiple EIDs on {}", theBaseResource);
forbidIfHasMultipleEids(theBaseResource);
}
@@ -159,7 +167,7 @@ public class MdmStorageInterceptor implements IMdmStorageInterceptor {
// If running in single EID mode, forbid multiple eids.
if (myMdmSettings.isPreventMultipleEids()) {
- ourLog.debug("Forbidding multiple EIDs on ", theUpdatedResource);
+ ourLog.debug("Forbidding multiple EIDs on {}", theUpdatedResource);
forbidIfHasMultipleEids(theUpdatedResource);
}
@@ -188,17 +196,41 @@ public class MdmStorageInterceptor implements IMdmStorageInterceptor {
}
}
- @Autowired
- private IMdmClearHelperSvc extends IResourcePersistentId>> myIMdmClearHelperSvc;
+ @Hook(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED)
+ public void deletePostCommit(
+ RequestDetails theRequest, IBaseResource theResource, TransactionDetails theTransactionDetails) {
+ Set goldenResourceIds = theTransactionDetails.getUserData(GOLDEN_RESOURCES_TO_DELETE);
+ if (goldenResourceIds != null) {
+ for (IResourcePersistentId goldenPid : goldenResourceIds) {
+ if (!theTransactionDetails.getDeletedResourceIds().contains(goldenPid)) {
+ IFhirResourceDao> dao = myDaoRegistry.getResourceDao(theResource);
+ deleteGoldenResource(goldenPid, dao, theRequest);
+ /*
+ * We will add the removed id to the deleted list so that
+ * the deletedResourceId list is accurate for what has been
+ * deleted.
+ *
+ * This benefits other interceptor writers who might want
+ * to do their own resource deletion on this same pre-commit
+ * hook (and wouldn't be aware if we did this deletion already).
+ */
+ theTransactionDetails.addDeletedResourceId(goldenPid);
+ }
+ }
+ theTransactionDetails.putUserData(GOLDEN_RESOURCES_TO_DELETE, null);
+ }
+ }
+
+ @SuppressWarnings("unchecked")
@Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_DELETED)
- public void deleteMdmLinks(RequestDetails theRequest, IBaseResource theResource) {
+ public void deleteMdmLinks(
+ RequestDetails theRequest, IBaseResource theResource, TransactionDetails theTransactionDetails) {
if (ourLinksDeletedBeforehand.get()) {
return;
}
if (myMdmSettings.isSupportedMdmType(myFhirContext.getResourceType(theResource))) {
-
IIdType sourceId = theResource.getIdElement().toVersionless();
IResourcePersistentId sourcePid =
myIdHelperSvc.getPidOrThrowException(RequestPartitionId.allPartitions(), sourceId);
@@ -213,34 +245,49 @@ public class MdmStorageInterceptor implements IMdmStorageInterceptor {
? linksByMatchResult.get(POSSIBLE_MATCH)
: new ArrayList<>();
- if (isDeletingLastMatchedSourceResouce(sourcePid, matches)) {
- // We are attempting to delete the only source resource left linked to the golden resource
- // In this case, we should automatically delete the golden resource to prevent orphaning
- IFhirResourceDao> dao = myDaoRegistry.getResourceDao(theResource);
+ if (isDeletingLastMatchedSourceResource(sourcePid, matches)) {
+ /*
+ * We are attempting to delete the only source resource left linked to the golden resource.
+ * In this case, we'll clean up remaining links and mark the orphaned
+ * golden resource for deletion, which we'll do in STORAGE_PRECOMMIT_RESOURCE_DELETED
+ */
IResourcePersistentId goldenPid = extractGoldenPid(theResource, matches.get(0));
+ if (!theTransactionDetails.getDeletedResourceIds().contains(goldenPid)) {
+ IFhirResourceDao> dao = myDaoRegistry.getResourceDao(theResource);
- cleanUpPossibleMatches(possibleMatches, dao, goldenPid, theRequest);
+ cleanUpPossibleMatches(possibleMatches, dao, goldenPid, theRequest);
- IAnyResource goldenResource = (IAnyResource) dao.readByPid(goldenPid);
- myMdmLinkDeleteSvc.deleteWithAnyReferenceTo(goldenResource);
+ IAnyResource goldenResource = (IAnyResource) dao.readByPid(goldenPid);
+ myMdmLinkDeleteSvc.deleteWithAnyReferenceTo(goldenResource);
- deleteGoldenResource(goldenPid, sourceId, dao, theRequest);
+ /*
+ * Mark the golden resource for deletion.
+ * We won't do it yet, because there might be additional deletes coming
+ * that include this exact golden resource
+ * (eg, if delete is done by a filter and multiple delete is enabled)
+ */
+ Set goldenIdsToDelete =
+ theTransactionDetails.getUserData(GOLDEN_RESOURCES_TO_DELETE);
+ if (goldenIdsToDelete == null) {
+ goldenIdsToDelete = new HashSet<>();
+ }
+ goldenIdsToDelete.add(goldenPid);
+ theTransactionDetails.putUserData(GOLDEN_RESOURCES_TO_DELETE, goldenIdsToDelete);
+ }
}
myMdmLinkDeleteSvc.deleteWithAnyReferenceTo(theResource);
}
}
+ @SuppressWarnings("rawtypes")
private void deleteGoldenResource(
- IResourcePersistentId goldenPid,
- IIdType theSourceId,
- IFhirResourceDao> theDao,
- RequestDetails theRequest) {
+ IResourcePersistentId goldenPid, IFhirResourceDao> theDao, RequestDetails theRequest) {
setLinksDeletedBeforehand();
if (myMdmSettings.isAutoExpungeGoldenResources()) {
int numDeleted = deleteExpungeGoldenResource(goldenPid);
if (numDeleted > 0) {
- ourLog.info("Removed {} golden resource(s) with references to {}", numDeleted, theSourceId);
+ ourLog.info("Removed {} golden resource(s).", numDeleted);
}
} else {
String url = theRequest == null ? "" : theRequest.getCompleteUrl();
@@ -289,7 +336,7 @@ public class MdmStorageInterceptor implements IMdmStorageInterceptor {
return goldenPid;
}
- private boolean isDeletingLastMatchedSourceResouce(IResourcePersistentId theSourcePid, List theMatches) {
+ private boolean isDeletingLastMatchedSourceResource(IResourcePersistentId theSourcePid, List theMatches) {
return theMatches.size() == 1
&& theMatches.get(0).getSourcePersistenceId().equals(theSourcePid);
}
@@ -302,6 +349,7 @@ public class MdmStorageInterceptor implements IMdmStorageInterceptor {
return retVal;
}
+ @SuppressWarnings("unchecked")
private int deleteExpungeGoldenResource(IResourcePersistentId theGoldenPid) {
IDeleteExpungeSvc deleteExpungeSvc = myIMdmClearHelperSvc.getDeleteExpungeSvc();
return deleteExpungeSvc.deleteExpunge(new ArrayList<>(Collections.singleton(theGoldenPid)), false, null);
From 5799c6b42bbb5d1ca80c546797ddf745c9944aa0 Mon Sep 17 00:00:00 2001
From: TipzCM
Date: Wed, 19 Jun 2024 18:21:20 -0400
Subject: [PATCH 06/19] hibernate search cannot be used for count query if
non-active parameters are present (#6027)
fixing count query with fulltextsearch bug
---
...l-index-search-with-filter-parameters.yaml | 10 +
.../fhir/jpa/dao/FulltextSearchSvcImpl.java | 31 ++-
.../uhn/fhir/jpa/dao/IFulltextSearchSvc.java | 12 +-
.../search/ExtendedHSearchSearchBuilder.java | 140 ++++++++-----
.../fhir/jpa/dao/search/LastNOperation.java | 9 +-
...lderConsumeAdvancedQueryClausesParams.java | 73 +++++++
...istedJpaSearchFirstPageBundleProvider.java | 2 +-
.../jpa/search/builder/SearchBuilder.java | 63 +++---
.../jpa/search/builder/SearchBuilderTest.java | 2 +-
...esourceDaoR4SearchWithElasticSearchIT.java | 20 +-
.../provider/r4/ResourceProviderR4Test.java | 12 +-
.../ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java | 2 +-
...iftedRefchainsAndChainedSortingR5Test.java | 6 +-
.../provider/r5/ResourceProviderR5Test.java | 197 ++++++++++++++++++
14 files changed, 478 insertions(+), 101 deletions(-)
create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6024-full-index-search-with-filter-parameters.yaml
create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/search/ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams.java
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6024-full-index-search-with-filter-parameters.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6024-full-index-search-with-filter-parameters.yaml
new file mode 100644
index 00000000000..d62a971afac
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6024-full-index-search-with-filter-parameters.yaml
@@ -0,0 +1,10 @@
+---
+type: fix
+issue: 6024
+title: "Fixed a bug in search where requesting a count with HSearch indexing
+ and FilterParameter enabled and using the _filter parameter would result
+ in inaccurate results being returned.
+ This happened because the count query would use an incorrect set of parameters
+ to find the count, and the regular search would then try to ensure its results
+ matched the count query (which it couldn't because it had different parameters).
+"
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java
index f7e1764e003..1a43905b822 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java
@@ -34,6 +34,7 @@ import ca.uhn.fhir.jpa.dao.search.IHSearchSortHelper;
import ca.uhn.fhir.jpa.dao.search.LastNOperation;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.model.search.ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams;
import ca.uhn.fhir.jpa.model.search.ExtendedHSearchIndexData;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.search.autocomplete.ValueSetAutocompleteOptions;
@@ -141,17 +142,17 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
}
@Override
- public boolean supportsSomeOf(SearchParameterMap myParams) {
-
- // keep this in sync with the guts of doSearch
+ public boolean canUseHibernateSearch(String theResourceType, SearchParameterMap myParams) {
boolean requiresHibernateSearchAccess = myParams.containsKey(Constants.PARAM_CONTENT)
|| myParams.containsKey(Constants.PARAM_TEXT)
|| myParams.isLastN();
+ // we have to use it - _text and _content searches only use hibernate
+ if (requiresHibernateSearchAccess) {
+ return true;
+ }
- requiresHibernateSearchAccess |=
- myStorageSettings.isAdvancedHSearchIndexing() && myAdvancedIndexQueryBuilder.isSupportsSomeOf(myParams);
-
- return requiresHibernateSearchAccess;
+ return myStorageSettings.isAdvancedHSearchIndexing()
+ && myAdvancedIndexQueryBuilder.canUseHibernateSearch(theResourceType, myParams, mySearchParamRegistry);
}
@Override
@@ -174,6 +175,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
}
// keep this in sync with supportsSomeOf();
+ @SuppressWarnings("rawtypes")
private ISearchQueryExecutor doSearch(
String theResourceType,
SearchParameterMap theParams,
@@ -208,6 +210,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
return DEFAULT_MAX_NON_PAGED_SIZE;
}
+ @SuppressWarnings("rawtypes")
private SearchQueryOptionsStep, Long, SearchLoadingOptionsStep, ?, ?> getSearchQueryOptionsStep(
String theResourceType, SearchParameterMap theParams, IResourcePersistentId theReferencingPid) {
@@ -230,6 +233,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
return query;
}
+ @SuppressWarnings("rawtypes")
private PredicateFinalStep buildWhereClause(
SearchPredicateFactory f,
String theResourceType,
@@ -271,8 +275,12 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
* Handle other supported parameters
*/
if (myStorageSettings.isAdvancedHSearchIndexing() && theParams.getEverythingMode() == null) {
- myAdvancedIndexQueryBuilder.addAndConsumeAdvancedQueryClauses(
- builder, theResourceType, theParams, mySearchParamRegistry);
+ ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams params =
+ new ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams();
+ params.setSearchParamRegistry(mySearchParamRegistry)
+ .setResourceType(theResourceType)
+ .setSearchParameterMap(theParams);
+ myAdvancedIndexQueryBuilder.addAndConsumeAdvancedQueryClauses(builder, params);
}
// DROP EARLY HERE IF BOOL IS EMPTY?
});
@@ -283,11 +291,13 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
return Search.session(myEntityManager);
}
+ @SuppressWarnings("rawtypes")
private List convertLongsToResourcePersistentIds(List theLongPids) {
return theLongPids.stream().map(JpaPid::fromId).collect(Collectors.toList());
}
@Override
+ @SuppressWarnings({"rawtypes", "unchecked"})
public List everything(
String theResourceName,
SearchParameterMap theParams,
@@ -336,6 +346,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
@Transactional()
@Override
+ @SuppressWarnings("unchecked")
public List search(
String theResourceName, SearchParameterMap theParams, RequestDetails theRequestDetails) {
validateHibernateSearchIsEnabled();
@@ -347,6 +358,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
/**
* Adapt our async interface to the legacy concrete List
*/
+ @SuppressWarnings("rawtypes")
private List toList(ISearchQueryExecutor theSearchResultStream, long theMaxSize) {
return StreamSupport.stream(Spliterators.spliteratorUnknownSize(theSearchResultStream, 0), false)
.map(JpaPid::fromId)
@@ -384,6 +396,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
}
@Override
+ @SuppressWarnings("rawtypes")
public List lastN(SearchParameterMap theParams, Integer theMaximumResults) {
ensureElastic();
dispatchEvent(IHSearchEventListener.HSearchEventType.SEARCH);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFulltextSearchSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFulltextSearchSvc.java
index 6da76807b17..6890b9bc26f 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFulltextSearchSvc.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFulltextSearchSvc.java
@@ -32,6 +32,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import java.util.Collection;
import java.util.List;
+@SuppressWarnings({"rawtypes"})
public interface IFulltextSearchSvc {
/**
@@ -79,11 +80,18 @@ public interface IFulltextSearchSvc {
ExtendedHSearchIndexData extractLuceneIndexData(
IBaseResource theResource, ResourceIndexedSearchParams theNewParams);
- boolean supportsSomeOf(SearchParameterMap myParams);
+ /**
+ * Returns true if the parameter map can be handled for hibernate search.
+ * We have to filter out any queries that might use search params
+ * we only know how to handle in JPA.
+ * -
+ * See {@link ca.uhn.fhir.jpa.dao.search.ExtendedHSearchSearchBuilder#addAndConsumeAdvancedQueryClauses}
+ */
+ boolean canUseHibernateSearch(String theResourceType, SearchParameterMap theParameterMap);
/**
* Re-publish the resource to the full-text index.
- *
+ * -
* During update, hibernate search only republishes the entity if it has changed.
* During $reindex, we want to force the re-index.
*
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchSearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchSearchBuilder.java
index b5f2d42ff7d..9f6b6c88261 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchSearchBuilder.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchSearchBuilder.java
@@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.dao.search;
import ca.uhn.fhir.context.RuntimeSearchParam;
+import ca.uhn.fhir.jpa.model.search.ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
import ca.uhn.fhir.model.api.IQueryParameterType;
@@ -34,6 +35,7 @@ import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.UriParam;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
+import ca.uhn.fhir.rest.server.util.ResourceSearchParams;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.apache.commons.collections4.CollectionUtils;
@@ -44,6 +46,7 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
+import java.util.stream.Collectors;
import static ca.uhn.fhir.rest.api.Constants.PARAMQUALIFIER_MISSING;
@@ -59,17 +62,55 @@ public class ExtendedHSearchSearchBuilder {
public static final Set ourUnsafeSearchParmeters = Sets.newHashSet("_id", "_meta");
/**
- * Are any of the queries supported by our indexing?
+	 * Determine if ExtendedHSearchSearchBuilder can support this parameter
+ * @param theParamName param name
+ * @param theActiveParamsForResourceType active search parameters for the desired resource type
+ * @return whether or not this search parameter is supported in hibernate
*/
- public boolean isSupportsSomeOf(SearchParameterMap myParams) {
- return myParams.getSort() != null
- || myParams.getLastUpdated() != null
- || myParams.entrySet().stream()
- .filter(e -> !ourUnsafeSearchParmeters.contains(e.getKey()))
- // each and clause may have a different modifier, so split down to the ORs
- .flatMap(andList -> andList.getValue().stream())
- .flatMap(Collection::stream)
- .anyMatch(this::isParamTypeSupported);
+ public boolean supportsSearchParameter(String theParamName, ResourceSearchParams theActiveParamsForResourceType) {
+ if (theActiveParamsForResourceType == null) {
+ return false;
+ }
+ if (ourUnsafeSearchParmeters.contains(theParamName)) {
+ return false;
+ }
+ if (!theActiveParamsForResourceType.containsParamName(theParamName)) {
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * Are any of the queries supported by our indexing?
+ * -
+ * If not, do not use hibernate, because the results will
+ * be inaccurate and wrong.
+ */
+ public boolean canUseHibernateSearch(
+ String theResourceType, SearchParameterMap myParams, ISearchParamRegistry theSearchParamRegistry) {
+ boolean canUseHibernate = true;
+ ResourceSearchParams resourceActiveSearchParams = theSearchParamRegistry.getActiveSearchParams(theResourceType);
+ for (String paramName : myParams.keySet()) {
+ // is this parameter supported?
+ if (!supportsSearchParameter(paramName, resourceActiveSearchParams)) {
+ canUseHibernate = false;
+ } else {
+ // are the parameter values supported?
+ canUseHibernate =
+ myParams.get(paramName).stream()
+ .flatMap(Collection::stream)
+ .collect(Collectors.toList())
+ .stream()
+ .anyMatch(this::isParamTypeSupported);
+ }
+
+ // if not supported, don't use
+ if (!canUseHibernate) {
+ return false;
+ }
+ }
+
+ return canUseHibernate;
}
/**
@@ -166,86 +207,91 @@ public class ExtendedHSearchSearchBuilder {
}
public void addAndConsumeAdvancedQueryClauses(
- ExtendedHSearchClauseBuilder builder,
- String theResourceType,
- SearchParameterMap theParams,
- ISearchParamRegistry theSearchParamRegistry) {
+ ExtendedHSearchClauseBuilder theBuilder,
+ ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams theMethodParams) {
+ SearchParameterMap searchParameterMap = theMethodParams.getSearchParameterMap();
+ String resourceType = theMethodParams.getResourceType();
+ ISearchParamRegistry searchParamRegistry = theMethodParams.getSearchParamRegistry();
+
// copy the keys to avoid concurrent modification error
- ArrayList paramNames = compileParamNames(theParams);
+ ArrayList paramNames = compileParamNames(searchParameterMap);
+ ResourceSearchParams activeSearchParams = searchParamRegistry.getActiveSearchParams(resourceType);
for (String nextParam : paramNames) {
- if (ourUnsafeSearchParmeters.contains(nextParam)) {
- continue;
- }
- RuntimeSearchParam activeParam = theSearchParamRegistry.getActiveSearchParam(theResourceType, nextParam);
- if (activeParam == null) {
+ if (!supportsSearchParameter(nextParam, activeSearchParams)) {
// ignore magic params handled in JPA
continue;
}
+ RuntimeSearchParam activeParam = activeSearchParams.get(nextParam);
// NOTE - keep this in sync with isParamSupported() above.
switch (activeParam.getParamType()) {
case TOKEN:
List> tokenTextAndOrTerms =
- theParams.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_TOKEN_TEXT);
- builder.addStringTextSearch(nextParam, tokenTextAndOrTerms);
+ searchParameterMap.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_TOKEN_TEXT);
+ theBuilder.addStringTextSearch(nextParam, tokenTextAndOrTerms);
List> tokenUnmodifiedAndOrTerms =
- theParams.removeByNameUnmodified(nextParam);
- builder.addTokenUnmodifiedSearch(nextParam, tokenUnmodifiedAndOrTerms);
+ searchParameterMap.removeByNameUnmodified(nextParam);
+ theBuilder.addTokenUnmodifiedSearch(nextParam, tokenUnmodifiedAndOrTerms);
break;
case STRING:
List> stringTextAndOrTerms =
- theParams.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_TOKEN_TEXT);
- builder.addStringTextSearch(nextParam, stringTextAndOrTerms);
+ searchParameterMap.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_TOKEN_TEXT);
+ theBuilder.addStringTextSearch(nextParam, stringTextAndOrTerms);
- List> stringExactAndOrTerms =
- theParams.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_STRING_EXACT);
- builder.addStringExactSearch(nextParam, stringExactAndOrTerms);
+ List> stringExactAndOrTerms = searchParameterMap.removeByNameAndModifier(
+ nextParam, Constants.PARAMQUALIFIER_STRING_EXACT);
+ theBuilder.addStringExactSearch(nextParam, stringExactAndOrTerms);
List> stringContainsAndOrTerms =
- theParams.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_STRING_CONTAINS);
- builder.addStringContainsSearch(nextParam, stringContainsAndOrTerms);
+ searchParameterMap.removeByNameAndModifier(
+ nextParam, Constants.PARAMQUALIFIER_STRING_CONTAINS);
+ theBuilder.addStringContainsSearch(nextParam, stringContainsAndOrTerms);
- List> stringAndOrTerms = theParams.removeByNameUnmodified(nextParam);
- builder.addStringUnmodifiedSearch(nextParam, stringAndOrTerms);
+ List> stringAndOrTerms =
+ searchParameterMap.removeByNameUnmodified(nextParam);
+ theBuilder.addStringUnmodifiedSearch(nextParam, stringAndOrTerms);
break;
case QUANTITY:
- List> quantityAndOrTerms = theParams.removeByNameUnmodified(nextParam);
- builder.addQuantityUnmodifiedSearch(nextParam, quantityAndOrTerms);
+ List> quantityAndOrTerms =
+ searchParameterMap.removeByNameUnmodified(nextParam);
+ theBuilder.addQuantityUnmodifiedSearch(nextParam, quantityAndOrTerms);
break;
case REFERENCE:
- List> referenceAndOrTerms = theParams.removeByNameUnmodified(nextParam);
- builder.addReferenceUnchainedSearch(nextParam, referenceAndOrTerms);
+ List> referenceAndOrTerms =
+ searchParameterMap.removeByNameUnmodified(nextParam);
+ theBuilder.addReferenceUnchainedSearch(nextParam, referenceAndOrTerms);
break;
case DATE:
List> dateAndOrTerms = nextParam.equalsIgnoreCase("_lastupdated")
- ? getLastUpdatedAndOrList(theParams)
- : theParams.removeByNameUnmodified(nextParam);
- builder.addDateUnmodifiedSearch(nextParam, dateAndOrTerms);
+ ? getLastUpdatedAndOrList(searchParameterMap)
+ : searchParameterMap.removeByNameUnmodified(nextParam);
+ theBuilder.addDateUnmodifiedSearch(nextParam, dateAndOrTerms);
break;
case COMPOSITE:
- List> compositeAndOrTerms = theParams.removeByNameUnmodified(nextParam);
+ List> compositeAndOrTerms =
+ searchParameterMap.removeByNameUnmodified(nextParam);
// RuntimeSearchParam only points to the subs by reference. Resolve here while we have
// ISearchParamRegistry
List subSearchParams =
- JpaParamUtil.resolveCompositeComponentsDeclaredOrder(theSearchParamRegistry, activeParam);
- builder.addCompositeUnmodifiedSearch(activeParam, subSearchParams, compositeAndOrTerms);
+ JpaParamUtil.resolveCompositeComponentsDeclaredOrder(searchParamRegistry, activeParam);
+ theBuilder.addCompositeUnmodifiedSearch(activeParam, subSearchParams, compositeAndOrTerms);
break;
case URI:
List> uriUnmodifiedAndOrTerms =
- theParams.removeByNameUnmodified(nextParam);
- builder.addUriUnmodifiedSearch(nextParam, uriUnmodifiedAndOrTerms);
+ searchParameterMap.removeByNameUnmodified(nextParam);
+ theBuilder.addUriUnmodifiedSearch(nextParam, uriUnmodifiedAndOrTerms);
break;
case NUMBER:
- List> numberUnmodifiedAndOrTerms = theParams.remove(nextParam);
- builder.addNumberUnmodifiedSearch(nextParam, numberUnmodifiedAndOrTerms);
+ List> numberUnmodifiedAndOrTerms = searchParameterMap.remove(nextParam);
+ theBuilder.addNumberUnmodifiedSearch(nextParam, numberUnmodifiedAndOrTerms);
break;
default:
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/LastNOperation.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/LastNOperation.java
index 1263bf027f5..823a2893a5f 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/LastNOperation.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/LastNOperation.java
@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.dao.search;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
+import ca.uhn.fhir.jpa.model.search.ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
@@ -67,8 +68,12 @@ public class LastNOperation {
b.must(f.match().field("myResourceType").matching(OBSERVATION_RES_TYPE));
ExtendedHSearchClauseBuilder builder =
new ExtendedHSearchClauseBuilder(myFhirContext, myStorageSettings, b, f);
- myExtendedHSearchSearchBuilder.addAndConsumeAdvancedQueryClauses(
- builder, OBSERVATION_RES_TYPE, theParams.clone(), mySearchParamRegistry);
+ ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams params =
+ new ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams();
+ params.setResourceType(OBSERVATION_RES_TYPE)
+ .setSearchParameterMap(theParams.clone())
+ .setSearchParamRegistry(mySearchParamRegistry);
+ myExtendedHSearchSearchBuilder.addAndConsumeAdvancedQueryClauses(builder, params);
}))
.aggregation(observationsByCodeKey, f -> f.fromJson(lastNAggregation.toAggregation()))
.fetch(0);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/search/ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/search/ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams.java
new file mode 100644
index 00000000000..3bb80396a0f
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/search/ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams.java
@@ -0,0 +1,73 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.model.search;
+
+import ca.uhn.fhir.jpa.dao.search.ExtendedHSearchClauseBuilder;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
+
+/**
+ * This is a parameter class for the
+ * {@link ca.uhn.fhir.jpa.dao.search.ExtendedHSearchSearchBuilder#addAndConsumeAdvancedQueryClauses(ExtendedHSearchClauseBuilder, ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams)}
+ * method, so that we can keep the signature manageable (small) and allow for updates without breaking
+ * implementers so often.
+ */
+public class ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams {
+ /**
+ * Resource type
+ */
+ private String myResourceType;
+ /**
+	 * The search parameter map for the query
+ */
+ private SearchParameterMap mySearchParameterMap;
+ /**
+ * Search param registry
+ */
+ private ISearchParamRegistry mySearchParamRegistry;
+
+ public String getResourceType() {
+ return myResourceType;
+ }
+
+ public ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams setResourceType(String theResourceType) {
+ myResourceType = theResourceType;
+ return this;
+ }
+
+ public SearchParameterMap getSearchParameterMap() {
+ return mySearchParameterMap;
+ }
+
+ public ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams setSearchParameterMap(SearchParameterMap theParams) {
+ mySearchParameterMap = theParams;
+ return this;
+ }
+
+ public ISearchParamRegistry getSearchParamRegistry() {
+ return mySearchParamRegistry;
+ }
+
+ public ExtendedHSearchBuilderConsumeAdvancedQueryClausesParams setSearchParamRegistry(
+ ISearchParamRegistry theSearchParamRegistry) {
+ mySearchParamRegistry = theSearchParamRegistry;
+ return this;
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java
index 9843034bee1..ec4f7fa16e5 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java
@@ -79,7 +79,7 @@ public class PersistedJpaSearchFirstPageBundleProvider extends PersistedJpaBundl
ourLog.trace("Done fetching search resource PIDs");
int countOfPids = pids.size();
- ;
+
int maxSize = Math.min(theToIndex - theFromIndex, countOfPids);
thePageBuilder.setTotalRequestedResourcesFetched(countOfPids);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java
index 076c6217660..a2bc4a4fcf5 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java
@@ -95,6 +95,7 @@ import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.StringUtil;
import ca.uhn.fhir.util.UrlUtil;
+import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Streams;
import com.healthmarketscience.sqlbuilder.Condition;
import jakarta.annotation.Nonnull;
@@ -165,7 +166,7 @@ public class SearchBuilder implements ISearchBuilder {
public static boolean myUseMaxPageSize50ForTest = false;
protected final IInterceptorBroadcaster myInterceptorBroadcaster;
protected final IResourceTagDao myResourceTagDao;
- String myResourceName;
+ private String myResourceName;
private final Class extends IBaseResource> myResourceType;
private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory;
private final SqlObjectFactory mySqlBuilderFactory;
@@ -206,6 +207,7 @@ public class SearchBuilder implements ISearchBuilder {
/**
* Constructor
*/
+ @SuppressWarnings({"rawtypes", "unchecked"})
public SearchBuilder(
IDao theDao,
String theResourceName,
@@ -240,6 +242,11 @@ public class SearchBuilder implements ISearchBuilder {
myIdHelperService = theIdHelperService;
}
+ @VisibleForTesting
+ void setResourceName(String theName) {
+ myResourceName = theName;
+ }
+
@Override
public void setMaxResultsToFetch(Integer theMaxResultsToFetch) {
myMaxResultsToFetch = theMaxResultsToFetch;
@@ -265,8 +272,6 @@ public class SearchBuilder implements ISearchBuilder {
attemptComboUniqueSpProcessing(theQueryStack, theParams, theRequest);
}
- SearchContainedModeEnum searchContainedMode = theParams.getSearchContainedMode();
-
// Handle _id and _tag last, since they can typically be tacked onto a different parameter
List paramNames = myParams.keySet().stream()
.filter(t -> !t.equals(IAnyResource.SP_RES_ID))
@@ -399,7 +404,8 @@ public class SearchBuilder implements ISearchBuilder {
}
if (fulltextExecutor == null) {
- fulltextExecutor = SearchQueryExecutors.from(fulltextMatchIds);
+ fulltextExecutor =
+ SearchQueryExecutors.from(fulltextMatchIds != null ? fulltextMatchIds : new ArrayList<>());
}
if (theSearchRuntimeDetails != null) {
@@ -486,7 +492,7 @@ public class SearchBuilder implements ISearchBuilder {
return fulltextEnabled
&& myParams != null
&& myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE
- && myFulltextSearchSvc.supportsSomeOf(myParams)
+ && myFulltextSearchSvc.canUseHibernateSearch(myResourceName, myParams)
&& myFulltextSearchSvc.supportsAllSortTerms(myResourceName, myParams);
}
@@ -538,8 +544,7 @@ public class SearchBuilder implements ISearchBuilder {
pid = myIdHelperService.resolveResourcePersistentIds(myRequestPartitionId, myResourceName, idParamValue);
}
- List pids = myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails);
- return pids;
+ return myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails);
}
private void doCreateChunkedQueries(
@@ -862,13 +867,8 @@ public class SearchBuilder implements ISearchBuilder {
theQueryStack.addSortOnLastUpdated(ascending);
} else {
-
- RuntimeSearchParam param = null;
-
- if (param == null) {
- // do we have a composition param defined for the whole chain?
- param = mySearchParamRegistry.getActiveSearchParam(myResourceName, theSort.getParamName());
- }
+ RuntimeSearchParam param =
+ mySearchParamRegistry.getActiveSearchParam(myResourceName, theSort.getParamName());
/*
* If we have a sort like _sort=subject.name and we have an
@@ -896,9 +896,7 @@ public class SearchBuilder implements ISearchBuilder {
mySearchParamRegistry.getActiveSearchParam(myResourceName, referenceParam);
if (outerParam == null) {
throwInvalidRequestExceptionForUnknownSortParameter(myResourceName, referenceParam);
- }
-
- if (outerParam.hasUpliftRefchain(targetParam)) {
+ } else if (outerParam.hasUpliftRefchain(targetParam)) {
for (String nextTargetType : outerParam.getTargets()) {
if (referenceParamTargetType != null && !referenceParamTargetType.equals(nextTargetType)) {
continue;
@@ -945,6 +943,9 @@ public class SearchBuilder implements ISearchBuilder {
throwInvalidRequestExceptionForUnknownSortParameter(getResourceName(), paramName);
}
+		// param will never be null here (the line above throws if it is null)
+		// this is just to prevent the warning
+ assert param != null;
if (isNotBlank(chainName) && param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) {
throw new InvalidRequestException(
Msg.code(2285) + "Invalid chain, " + paramName + " is not a reference SearchParameter");
@@ -1121,11 +1122,15 @@ public class SearchBuilder implements ISearchBuilder {
resourceType, next, tagMap.get(next.getId()), theForHistoryOperation);
}
if (resource == null) {
- ourLog.warn(
- "Unable to find resource {}/{}/_history/{} in database",
- next.getResourceType(),
- next.getIdDt().getIdPart(),
- next.getVersion());
+ if (next != null) {
+ ourLog.warn(
+ "Unable to find resource {}/{}/_history/{} in database",
+ next.getResourceType(),
+ next.getIdDt().getIdPart(),
+ next.getVersion());
+ } else {
+ ourLog.warn("Unable to find resource in database.");
+ }
continue;
}
@@ -1196,7 +1201,6 @@ public class SearchBuilder implements ISearchBuilder {
RequestDetails theDetails) {
if (thePids.isEmpty()) {
ourLog.debug("The include pids are empty");
- // return;
}
// Dupes will cause a crash later anyhow, but this is expensive so only do it
@@ -1256,10 +1260,9 @@ public class SearchBuilder implements ISearchBuilder {
// only impl
// to handle lastN?
if (myStorageSettings.isAdvancedHSearchIndexing() && myStorageSettings.isStoreResourceInHSearchIndex()) {
- List pidList = thePids.stream().map(pid -> (pid).getId()).collect(Collectors.toList());
+ List pidList = thePids.stream().map(JpaPid::getId).collect(Collectors.toList());
- List resources = myFulltextSearchSvc.getResources(pidList);
- return resources;
+ return myFulltextSearchSvc.getResources(pidList);
} else if (!Objects.isNull(myParams) && myParams.isLastN()) {
// legacy LastN implementation
return myIElasticsearchSvc.getObservationResources(thePids);
@@ -1344,7 +1347,7 @@ public class SearchBuilder implements ISearchBuilder {
for (Iterator iter = includes.iterator(); iter.hasNext(); ) {
Include nextInclude = iter.next();
- if (nextInclude.isRecurse() == false) {
+ if (!nextInclude.isRecurse()) {
iter.remove();
}
@@ -1707,6 +1710,8 @@ public class SearchBuilder implements ISearchBuilder {
}
/**
+ * Calls Performance Trace Hook
+	 * @param request the request details
* Sends a raw SQL query to the Pointcut for raw SQL queries.
*/
private void callRawSqlHookWithCurrentThreadQueries(RequestDetails request) {
@@ -1890,7 +1895,7 @@ public class SearchBuilder implements ISearchBuilder {
for (RuntimeSearchParam nextCandidate : candidateComboParams) {
List nextCandidateParamNames =
JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, nextCandidate).stream()
- .map(t -> t.getName())
+ .map(RuntimeSearchParam::getName)
.collect(Collectors.toList());
if (theParams.keySet().containsAll(nextCandidateParamNames)) {
comboParam = nextCandidate;
@@ -1902,7 +1907,7 @@ public class SearchBuilder implements ISearchBuilder {
if (comboParam != null) {
// Since we're going to remove elements below
- theParams.values().forEach(nextAndList -> ensureSubListsAreWritable(nextAndList));
+ theParams.values().forEach(this::ensureSubListsAreWritable);
StringBuilder sb = new StringBuilder();
sb.append(myResourceName);
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchBuilderTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchBuilderTest.java
index fd8c3948478..f6564c8c53e 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchBuilderTest.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/SearchBuilderTest.java
@@ -39,7 +39,7 @@ class SearchBuilderTest {
@BeforeEach
public void beforeEach() {
- mySearchBuilder.myResourceName = "QuestionnaireResponse";
+ mySearchBuilder.setResourceName("QuestionnaireResponse");
when(myDaoRegistry.getRegisteredDaoTypes()).thenReturn(ourCtx.getResourceTypes());
}
diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java b/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java
index 947638b7d11..1a8d4178c75 100644
--- a/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java
+++ b/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchWithElasticSearchIT.java
@@ -26,6 +26,7 @@ import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.search.BaseSourceSearchParameterTestCases;
import ca.uhn.fhir.jpa.search.CompositeSearchParameterTestCases;
import ca.uhn.fhir.jpa.search.QuantitySearchParameterTestCases;
+import ca.uhn.fhir.jpa.search.builder.SearchBuilder;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
@@ -54,6 +55,9 @@ import ca.uhn.fhir.test.utilities.LogbackLevelOverrideExtension;
import ca.uhn.fhir.test.utilities.docker.RequiresDocker;
import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.ValidationResult;
+import ca.uhn.test.util.LogbackTestExtension;
+import ch.qos.logback.classic.Level;
+import ch.qos.logback.classic.spi.ILoggingEvent;
import jakarta.annotation.Nonnull;
import jakarta.persistence.EntityManager;
import org.hl7.fhir.instance.model.api.IBaseCoding;
@@ -118,6 +122,7 @@ import static ca.uhn.fhir.jpa.model.util.UcumServiceUtil.UCUM_CODESYSTEM_URL;
import static ca.uhn.fhir.rest.api.Constants.CHARSET_UTF8;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -168,6 +173,9 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
TestDaoSearch myTestDaoSearch;
@RegisterExtension
LogbackLevelOverrideExtension myLogbackLevelOverrideExtension = new LogbackLevelOverrideExtension();
+
+ @RegisterExtension
+ LogbackTestExtension myLogbackTestExtension = new LogbackTestExtension();
@Autowired
@Qualifier("myCodeSystemDaoR4")
private IFhirResourceDao myCodeSystemDao;
@@ -742,19 +750,21 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
*/
@Test
public void testDirectPathWholeResourceNotIndexedWorks() {
+ // setup
+ myLogbackLevelOverrideExtension.setLogLevel(SearchBuilder.class, Level.WARN);
IIdType id1 = myTestDataBuilder.createObservation(List.of(myTestDataBuilder.withObservationCode("http://example.com/", "theCode")));
// set it after creating resource, so search doesn't find it in the index
myStorageSettings.setStoreResourceInHSearchIndex(true);
- myCaptureQueriesListener.clear();
-
- List result = searchForFastResources("Observation?code=theCode");
- myCaptureQueriesListener.logSelectQueriesForCurrentThread();
+ List result = searchForFastResources("Observation?code=theCode&_count=10&_total=accurate");
assertThat(result).hasSize(1);
assertEquals(((Observation) result.get(0)).getIdElement().getIdPart(), id1.getIdPart());
- assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread().size()).as("JPA search for IDs and for resources").isEqualTo(2);
+
+ List events = myLogbackTestExtension.filterLoggingEventsWithPredicate(e -> e.getLevel() == Level.WARN);
+ assertFalse(events.isEmpty());
+ assertTrue(events.stream().anyMatch(e -> e.getFormattedMessage().contains("Some resources were not found in index. Make sure all resources were indexed. Resorting to database search.")));
// restore changed property
JpaStorageSettings defaultConfig = new JpaStorageSettings();
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
index 14fb2bb0d7e..a21183839e6 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
@@ -2120,6 +2120,8 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
@SuppressWarnings("unused")
@Test
public void testFullTextSearch() throws Exception {
+ IParser parser = myFhirContext.newJsonParser();
+
Observation obs1 = new Observation();
obs1.getCode().setText("Systolic Blood Pressure");
obs1.setStatus(ObservationStatus.FINAL);
@@ -2131,13 +2133,21 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
obs2.setStatus(ObservationStatus.FINAL);
obs2.setValue(new Quantity(81));
IIdType id2 = myObservationDao.create(obs2, mySrd).getId().toUnqualifiedVersionless();
+ obs2.setId(id2);
+
+ myStorageSettings.setAdvancedHSearchIndexing(true);
HttpGet get = new HttpGet(myServerBase + "/Observation?_content=systolic&_pretty=true");
+ get.addHeader("Content-Type", "application/json");
try (CloseableHttpResponse response = ourHttpClient.execute(get)) {
assertEquals(200, response.getStatusLine().getStatusCode());
String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(responseString);
- assertThat(responseString).contains(id1.getIdPart());
+ Bundle bundle = parser.parseResource(Bundle.class, responseString);
+ assertEquals(1, bundle.getTotal());
+ Resource resource = bundle.getEntry().get(0).getResource();
+ assertEquals("Observation", resource.fhirType());
+ assertEquals(id1.getIdPart(), resource.getIdPart());
}
}
diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java
index d6c38965bbb..81e03a692b3 100644
--- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java
+++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java
@@ -398,7 +398,7 @@ public abstract class BaseJpaR5Test extends BaseJpaTest implements ITestDataBuil
private PerformanceTracingLoggingInterceptor myPerformanceTracingLoggingInterceptor;
@Autowired
- private DaoRegistry myDaoRegistry;
+ protected DaoRegistry myDaoRegistry;
@Autowired
private IBulkDataExportJobSchedulingHelper myBulkDataSchedulerHelper;
diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java
index 575da4a5e31..f852b379a76 100644
--- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java
+++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java
@@ -13,16 +13,17 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.util.BundleBuilder;
import ca.uhn.fhir.util.HapiExtensions;
+import jakarta.annotation.Nonnull;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
-import org.hl7.fhir.r5.model.Composition;
-import org.hl7.fhir.r5.model.IdType;
import org.hl7.fhir.r5.model.Bundle;
import org.hl7.fhir.r5.model.CodeType;
+import org.hl7.fhir.r5.model.Composition;
import org.hl7.fhir.r5.model.DateType;
import org.hl7.fhir.r5.model.Encounter;
import org.hl7.fhir.r5.model.Enumerations;
import org.hl7.fhir.r5.model.Extension;
+import org.hl7.fhir.r5.model.IdType;
import org.hl7.fhir.r5.model.Identifier;
import org.hl7.fhir.r5.model.Organization;
import org.hl7.fhir.r5.model.Patient;
@@ -35,7 +36,6 @@ import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
-import jakarta.annotation.Nonnull;
import java.util.List;
import static org.apache.commons.lang3.StringUtils.countMatches;
diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5Test.java
index ba047dc73c5..3870487c81c 100644
--- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5Test.java
+++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5Test.java
@@ -1,11 +1,26 @@
package ca.uhn.fhir.jpa.provider.r5;
+import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx;
+import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters;
+import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.model.api.Include;
+import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.api.SearchTotalModeEnum;
+import ca.uhn.fhir.rest.api.SortOrderEnum;
+import ca.uhn.fhir.rest.api.SortSpec;
+import ca.uhn.fhir.rest.api.server.IBundleProvider;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.client.interceptor.CapturingInterceptor;
+import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.exceptions.NotImplementedOperationException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.util.BundleBuilder;
@@ -24,13 +39,18 @@ import org.hl7.fhir.r5.model.CarePlan;
import org.hl7.fhir.r5.model.CodeableConcept;
import org.hl7.fhir.r5.model.Condition;
import org.hl7.fhir.r5.model.DateTimeType;
+import org.hl7.fhir.r5.model.Extension;
import org.hl7.fhir.r5.model.MedicationRequest;
+import org.hl7.fhir.r5.model.MedicinalProductDefinition;
import org.hl7.fhir.r5.model.Observation;
import org.hl7.fhir.r5.model.Observation.ObservationComponentComponent;
import org.hl7.fhir.r5.model.Organization;
import org.hl7.fhir.r5.model.Parameters;
import org.hl7.fhir.r5.model.Patient;
import org.hl7.fhir.r5.model.Quantity;
+import org.hl7.fhir.r5.model.SearchParameter;
+import org.hl7.fhir.r5.model.StringType;
+import org.intellij.lang.annotations.Language;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -43,11 +63,13 @@ import org.springframework.util.comparator.Comparators;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.Set;
import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.leftPad;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
@SuppressWarnings("Duplicates")
public class ResourceProviderR5Test extends BaseResourceProviderR5Test {
@@ -206,7 +228,181 @@ public class ResourceProviderR5Test extends BaseResourceProviderR5Test {
assertEquals(501, e.getStatusCode());
assertThat(e.getMessage()).contains("Some Failure Message");
}
+ }
+ @Test
+ public void searchForNewerResources_fullTextSearchWithFilterAndCount_shouldReturnAccurateResults() {
+ IParser parser = myFhirContext.newJsonParser();
+ int count = 10;
+
+ boolean presetFilterParameterEnabled = myStorageSettings.isFilterParameterEnabled();
+ boolean presetAdvancedHSearchIndexing = myStorageSettings.isAdvancedHSearchIndexing();
+
+ try {
+ // fullTextSearch means Advanced Hibernate Search
+ myStorageSettings.setFilterParameterEnabled(true);
+ myStorageSettings.setAdvancedHSearchIndexing(true);
+
+ // create custom search parameters - the _filter and _include are needed
+ {
+ @SuppressWarnings("unchecked")
+ IFhirResourceDao spDao = myDaoRegistry.getResourceDao("SearchParameter");
+ SearchParameter sp;
+
+ @Language("JSON")
+ String includeParam = """
+ {
+ "resourceType": "SearchParameter",
+ "id": "9905463e-e817-4db0-9a3e-ff6aa3427848",
+ "meta": {
+ "versionId": "2",
+ "lastUpdated": "2024-03-28T12:53:57.874+00:00",
+ "source": "#7b34a4bfa42fe3ae"
+ },
+ "title": "Medicinal Product Manfacturer",
+ "status": "active",
+ "publisher": "MOH-IDMS",
+ "code": "productmanufacturer",
+ "base": [
+ "MedicinalProductDefinition"
+ ],
+ "type": "reference",
+ "expression": "MedicinalProductDefinition.operation.organization"
+ }
+ """;
+ sp = parser.parseResource(SearchParameter.class, includeParam);
+ spDao.create(sp, new SystemRequestDetails());
+ sp = null;
+ @Language("JSON")
+ String filterParam = """
+ {
+ "resourceType": "SearchParameter",
+ "id": "SEARCH-PARAMETER-MedicinalProductDefinition-SearchableString",
+ "meta": {
+ "versionId": "2",
+ "lastUpdated": "2024-03-27T19:20:25.200+00:00",
+ "source": "#384dd6bccaeafa6c"
+ },
+ "url": "https://health.gov.on.ca/idms/fhir/SearchParameter/MedicinalProductDefinition-SearchableString",
+ "version": "1.0.0",
+ "name": "MedicinalProductDefinitionSearchableString",
+ "status": "active",
+ "publisher": "MOH-IDMS",
+ "description": "Search Parameter for the MedicinalProductDefinition Searchable String Extension",
+ "code": "MedicinalProductDefinitionSearchableString",
+ "base": [
+ "MedicinalProductDefinition"
+ ],
+ "type": "string",
+ "expression": "MedicinalProductDefinition.extension('https://health.gov.on.ca/idms/fhir/StructureDefinition/SearchableExtraString')",
+ "target": [
+ "MedicinalProductDefinition"
+ ]
+ }
+ """;
+ sp = parser.parseResource(SearchParameter.class, filterParam);
+ spDao.create(sp, new SystemRequestDetails());
+ }
+ // create MedicinalProductDefinitions
+ MedicinalProductDefinition mdr;
+ {
+ @Language("JSON")
+ String mpdstr = """
+ {
+ "resourceType": "MedicinalProductDefinition",
+ "id": "36fb418b-4b1f-414c-bbb1-731bc8744b93",
+ "meta": {
+ "versionId": "17",
+ "lastUpdated": "2024-06-10T16:52:23.907+00:00",
+ "source": "#3a309416d5f52c5b",
+ "profile": [
+ "https://health.gov.on.ca/idms/fhir/StructureDefinition/IDMS_MedicinalProductDefinition"
+ ]
+ },
+ "extension": [
+ {
+ "url": "https://health.gov.on.ca/idms/fhir/StructureDefinition/SearchableExtraString",
+ "valueString": "zahidbrand0610-2up|genupuu|qwewqe2 111|11111115|DF other des|Biologic|Oncology|Private Label"
+ }
+ ],
+ "status": {
+ "coding": [
+ {
+ "system": "http://hl7.org/fhir/ValueSet/publication-status",
+ "code": "active",
+ "display": "Active"
+ }
+ ]
+ },
+ "name": [
+ {
+ "productName": "zahidbrand0610-2up"
+ }
+ ]
+ }
+ """;
+ mdr = parser.parseResource(MedicinalProductDefinition.class, mpdstr);
+ }
+ IFhirResourceDao mdrdao = myDaoRegistry.getResourceDao(MedicinalProductDefinition.class);
+
+ /*
+ * We actually want a bunch of non-matching resources in the db
+ * that won't match the filter before we get to the one that will.
+ *
+ * To this end, we're going to insert more than we plan
+ * on retrieving to ensure the _filter is being used in both the
+ * count query and the actual db hit
+ */
+ List productNames = mdr.getName();
+ mdr.setName(null);
+ List extensions = mdr.getExtension();
+ mdr.setExtension(null);
+ // we need at least 10 of these; 20 should be good
+ for (int i = 0; i < 2 * count; i++) {
+ mdr.addName(new MedicinalProductDefinition.MedicinalProductDefinitionNameComponent("Product " + i));
+ mdr.addExtension()
+ .setUrl("https://health.gov.on.ca/idms/fhir/StructureDefinition/SearchableExtraString")
+ .setValue(new StringType("Non-matching string " + i));
+ mdrdao.create(mdr, new SystemRequestDetails());
+ }
+ mdr.setName(productNames);
+ mdr.setExtension(extensions);
+ mdrdao.create(mdr, new SystemRequestDetails());
+
+ // do a reindex
+ ReindexJobParameters jobParameters = new ReindexJobParameters();
+ jobParameters.setRequestPartitionId(RequestPartitionId.allPartitions());
+ JobInstanceStartRequest request = new JobInstanceStartRequest();
+ request.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX);
+ request.setParameters(jobParameters);
+ Batch2JobStartResponse response = myJobCoordinator.startInstance(new SystemRequestDetails(), request);
+
+ myBatch2JobHelper.awaitJobCompletion(response);
+
+ // query like:
+ // MedicinalProductDefinition?_getpagesoffset=0&_count=10&_total=accurate&_sort:asc=name&status=active&_include=MedicinalProductDefinition:productmanufacturer&_filter=MedicinalProductDefinitionSearchableString%20co%20%22zah%22
+ SearchParameterMap map = new SearchParameterMap();
+ map.setCount(10);
+ map.setSearchTotalMode(SearchTotalModeEnum.ACCURATE);
+ map.setSort(new SortSpec().setOrder(SortOrderEnum.ASC).setParamName("name"));
+ map.setIncludes(Set.of(
+ new Include("MedicinalProductDefinition:productmanufacturer")
+ ));
+ map.add("_filter", new StringParam("MedicinalProductDefinitionSearchableString co \"zah\""));
+
+ // test
+ IBundleProvider result = mdrdao.search(map, new SystemRequestDetails());
+
+ // validate
+ // we expect to find our 1 matching resource
+ assertEquals(1, result.getAllResources().size());
+ assertNotNull(result.size());
+ assertEquals(1, result.size());
+ } finally {
+ // reset values
+ myStorageSettings.setFilterParameterEnabled(presetFilterParameterEnabled);
+ myStorageSettings.setAdvancedHSearchIndexing(presetAdvancedHSearchIndexing);
+ }
}
@Test
@@ -609,4 +805,5 @@ public class ResourceProviderR5Test extends BaseResourceProviderR5Test {
}
return retVal;
}
+
}
From 0397b9ddc8a8672d64fa726c26c13fbfc7b97a25 Mon Sep 17 00:00:00 2001
From: volodymyr-korzh <132366313+volodymyr-korzh@users.noreply.github.com>
Date: Thu, 20 Jun 2024 14:10:40 -0600
Subject: [PATCH 07/19] Reduce storage required for indexing - stop writing
sp_name, res_type, and sp_updated to hfj_spidx_* tables (#5941)
* Reduce storage required for indexing - implementation
---
...37-reduce-storage-for-sp-index-tables.yaml | 7 +
.../uhn/hapi/fhir/changelog/7_4_0/upgrade.md | 25 ++
.../hapi/fhir/docs/server_jpa/performance.md | 16 +
.../uhn/hapi/fhir/docs/server_jpa/schema.md | 4 +-
.../enabling_in_hapi_fhir.md | 2 +-
.../ca/uhn/fhir/jpa/config/SearchConfig.java | 14 +
.../dao/index/DaoSearchParamSynchronizer.java | 69 ++++
.../tasks/HapiFhirJpaMigrationTasks.java | 98 +++++
.../BaseSearchParamPredicateBuilder.java | 17 +-
.../index/DaoSearchParamSynchronizerTest.java | 2 +
.../jpa/model/config/PartitionSettings.java | 8 +
.../BaseResourceIndexedSearchParam.java | 104 +++--
...aseResourceIndexedSearchParamQuantity.java | 22 +-
.../ResourceIndexedComboTokenNonUnique.java | 6 +-
.../ResourceIndexedSearchParamCoords.java | 19 +-
.../ResourceIndexedSearchParamDate.java | 24 +-
.../ResourceIndexedSearchParamNumber.java | 20 +-
.../ResourceIndexedSearchParamQuantity.java | 17 +-
...eIndexedSearchParamQuantityNormalized.java | 17 +-
.../ResourceIndexedSearchParamString.java | 28 +-
.../ResourceIndexedSearchParamToken.java | 26 +-
.../entity/ResourceIndexedSearchParamUri.java | 25 +-
.../entity/SearchParamPresentEntity.java | 5 +-
.../jpa/model/entity/StorageSettings.java | 61 +++
.../IndexStorageOptimizationListener.java | 99 +++++
.../ISearchParamHashIdentityRegistry.java | 9 +
.../fhir/jpa/model/util/SearchParamHash.java | 85 ++++
.../ResourceIndexedSearchParamCoordsTest.java | 60 ++-
.../ResourceIndexedSearchParamDateTest.java | 108 ++++--
.../ResourceIndexedSearchParamNumberTest.java | 49 ++-
...exedSearchParamQuantityNormalizedTest.java | 60 ++-
...esourceIndexedSearchParamQuantityTest.java | 59 ++-
.../ResourceIndexedSearchParamStringTest.java | 61 ++-
.../ResourceIndexedSearchParamTokenTest.java | 56 ++-
.../ResourceIndexedSearchParamUriTest.java | 53 ++-
.../model/util/SearchParamHashUtilTest.java | 68 ++++
.../ResourceIndexedSearchParams.java | 19 +-
.../matcher/InMemoryResourceMatcher.java | 5 +-
.../registry/JpaSearchParamCache.java | 57 +++
.../registry/SearchParamRegistryImpl.java | 12 +-
.../ResourceIndexedSearchParamsTest.java | 34 ++
...rceMatcherR5IndexStorageOptimizedTest.java | 45 +++
.../InMemoryResourceMatcherR5Test.java | 12 +-
.../registry/JpaSearchParamCacheTest.java | 46 ++-
...esourceDaoR4IndexStorageOptimizedTest.java | 362 ++++++++++++++++++
.../provider/r4/ResourceProviderR4Test.java | 14 +
...esourceDaoR5IndexStorageOptimizedTest.java | 52 +++
.../rest/server/util/IndexedSearchParam.java | 42 ++
48 files changed, 1837 insertions(+), 266 deletions(-)
create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5937-reduce-storage-for-sp-index-tables.yaml
create mode 100644 hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/listener/IndexStorageOptimizationListener.java
create mode 100644 hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/ISearchParamHashIdentityRegistry.java
create mode 100644 hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/SearchParamHash.java
create mode 100644 hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/util/SearchParamHashUtilTest.java
create mode 100644 hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcherR5IndexStorageOptimizedTest.java
create mode 100644 hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4IndexStorageOptimizedTest.java
create mode 100644 hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5IndexStorageOptimizedTest.java
create mode 100644 hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/IndexedSearchParam.java
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5937-reduce-storage-for-sp-index-tables.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5937-reduce-storage-for-sp-index-tables.yaml
new file mode 100644
index 00000000000..35629089b83
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/5937-reduce-storage-for-sp-index-tables.yaml
@@ -0,0 +1,7 @@
+---
+type: perf
+issue: 5937
+title: "A new configuration option, `StorageSettings#setIndexStorageOptimized(boolean)` has been added. If enabled,
+the server will not write data to the `SP_NAME`, `RES_TYPE`, `SP_UPDATED` columns for all `HFJ_SPIDX_xxx` tables.
+This can help reduce the overall storage size on servers where HFJ_SPIDX tables are expected to have a large
+amount of data."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/upgrade.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/upgrade.md
index e69de29bb2d..c343e8e04ab 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/upgrade.md
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/upgrade.md
@@ -0,0 +1,25 @@
+## Possible migration errors on SQL Server (MSSQL)
+
+* This affects only clients running SQL Server (MSSQL) who have custom indexes on `HFJ_SPIDX` tables, which
+ include `sp_name` or `res_type` columns.
+* For those clients, the migration that changes the `sp_name` and `res_type` columns to nullable on `HFJ_SPIDX` tables may complete with errors, because altering a column to nullable while it is
+  part of an index can fail on SQL Server (MSSQL).
+* If a client wants to keep using the existing indexes and settings, these errors can be ignored. However, if a client wants to enable both the [Index Storage Optimized](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/entity/StorageSettings.html#setIndexStorageOptimized(boolean))
+  and [Index Missing Fields](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/entity/StorageSettings.html#getIndexMissingFields()) settings, manual steps are required to change the nullability of `sp_name` and `res_type`.
+
+To update columns to nullable in such a scenario, execute steps below:
+
+1. Indexes that include `sp_name` or `res_type` columns should be dropped:
+```sql
+DROP INDEX IDX_SP_TOKEN_REST_TYPE_SP_NAME ON HFJ_SPIDX_TOKEN;
+```
+2. The nullability of `sp_name` and `res_type` columns should be updated:
+
+```sql
+ALTER TABLE HFJ_SPIDX_TOKEN ALTER COLUMN RES_TYPE varchar(100) NULL;
+ALTER TABLE HFJ_SPIDX_TOKEN ALTER COLUMN SP_NAME varchar(100) NULL;
+```
+3. Additionally, the following index may need to be added to improve the search performance:
+```sql
+CREATE INDEX IDX_SP_TOKEN_MISSING_OPTIMIZED ON HFJ_SPIDX_TOKEN (HASH_IDENTITY, SP_MISSING, RES_ID, PARTITION_ID);
+```
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/performance.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/performance.md
index 8535ac4ce10..ece395acd99 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/performance.md
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/performance.md
@@ -68,3 +68,19 @@ This setting controls whether non-resource (ex: Patient is a resource, MdmLink i
Clients may want to disable this setting for performance reasons as it populates a new set of database tables when enabled.
Setting this property explicitly to false disables the feature: [Non Resource DB History](/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.html#isNonResourceDbHistoryEnabled())
+
+# Enabling Index Storage Optimization
+
+If enabled, the server will not write data to the `SP_NAME`, `RES_TYPE`, `SP_UPDATED` columns for all `HFJ_SPIDX_xxx` tables.
+
+This setting may be enabled on servers where `HFJ_SPIDX_xxx` tables are expected to have a large amount of data (millions of rows) in order to reduce overall storage size.
+
+Setting this property explicitly to true enables the feature: [Index Storage Optimized](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/entity/StorageSettings.html#setIndexStorageOptimized(boolean))
+
+## Limitations
+
+* This setting only applies to newly inserted and updated rows in `HFJ_SPIDX_xxx` tables. All existing rows will still have values in the `SP_NAME`, `RES_TYPE` and `SP_UPDATED` columns. Executing the `$reindex` operation will apply storage optimization to existing data.
+
+* If this setting is enabled along with [Index Missing Fields](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/entity/StorageSettings.html#getIndexMissingFields()) setting, the following index may need to be added into the `HFJ_SPIDX_xxx` tables to improve the search performance: `(HASH_IDENTITY, SP_MISSING, RES_ID, PARTITION_ID)`.
+
+* This setting should not be enabled in combination with the [Include Partition in Search Hashes](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/config/PartitionSettings.html#setIncludePartitionInSearchHashes(boolean)) flag, because in that case the partition cannot be included in the search hashes.
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/schema.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/schema.md
index c9000f3670c..67f45c680a0 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/schema.md
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/schema.md
@@ -502,7 +502,7 @@ The following columns are common to **all HFJ_SPIDX_xxx tables**.
SP_NAME
String
-
+
Nullable
This is the name of the search parameter being indexed.
@@ -511,7 +511,7 @@ The following columns are common to **all HFJ_SPIDX_xxx tables**.
RES_TYPE
String
-
+
Nullable
This is the name of the resource being indexed.
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_partitioning/enabling_in_hapi_fhir.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_partitioning/enabling_in_hapi_fhir.md
index ec177cccad5..d2ca0227be5 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_partitioning/enabling_in_hapi_fhir.md
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_partitioning/enabling_in_hapi_fhir.md
@@ -6,6 +6,6 @@ The [PartitionSettings](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir
The following settings can be enabled:
-* **Include Partition in Search Hashes** ([JavaDoc](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/config/PartitionSettings.html#setIncludePartitionInSearchHashes(boolean))): If this feature is enabled, partition IDs will be factored into [Search Hashes](/hapi-fhir/docs/server_jpa/schema.html#search-hashes). When this flag is not set (as is the default), when a search requests a specific partition, an additional SQL WHERE predicate is added to the query to explicitly request the given partition ID. When this flag is set, this additional WHERE predicate is not necessary since the partition is factored into the hash value being searched on. Setting this flag avoids the need to manually adjust indexes against the HFJ_SPIDX tables. Note that this flag should **not be used in environments where partitioning is being used for security purposes**, since it is possible for a user to reverse engineer false hash collisions.
+* **Include Partition in Search Hashes** ([JavaDoc](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/config/PartitionSettings.html#setIncludePartitionInSearchHashes(boolean))): If this feature is enabled, partition IDs will be factored into [Search Hashes](/hapi-fhir/docs/server_jpa/schema.html#search-hashes). When this flag is not set (as is the default), when a search requests a specific partition, an additional SQL WHERE predicate is added to the query to explicitly request the given partition ID. When this flag is set, this additional WHERE predicate is not necessary since the partition is factored into the hash value being searched on. Setting this flag avoids the need to manually adjust indexes against the HFJ_SPIDX tables. Note that this flag should **not be used in environments where partitioning is being used for security purposes**, since it is possible for a user to reverse engineer false hash collisions. This setting should not be enabled in combination with the [Index Storage Optimized](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/entity/StorageSettings.html#isIndexStorageOptimized()) flag, because in that case the partition cannot be included in the search hashes.
* **Cross-Partition Reference Mode**: ([JavaDoc](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/config/PartitionSettings.html#setAllowReferencesAcrossPartitions(ca.uhn.fhir.jpa.model.config.PartitionSettings.CrossPartitionReferenceMode))): This setting controls whether resources in one partition should be allowed to create references to resources in other partitions.
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java
index 2677324da65..e1070f1de6a 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java
@@ -19,7 +19,9 @@
*/
package ca.uhn.fhir.jpa.config;
+import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
@@ -47,6 +49,7 @@ import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
import ca.uhn.fhir.rest.server.IPagingProvider;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
+import jakarta.annotation.PostConstruct;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -206,4 +209,15 @@ public class SearchConfig {
exceptionService() // singleton
);
}
+
+ @PostConstruct
+ public void validateConfiguration() {
+ if (myStorageSettings.isIndexStorageOptimized()
+ && myPartitionSettings.isPartitioningEnabled()
+ && myPartitionSettings.isIncludePartitionInSearchHashes()) {
+ throw new ConfigurationException(Msg.code(2525) + "Incorrect configuration. "
+ + "StorageSettings#isIndexStorageOptimized and PartitionSettings.isIncludePartitionInSearchHashes "
+ + "cannot be enabled at the same time.");
+ }
+ }
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoSearchParamSynchronizer.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoSearchParamSynchronizer.java
index 6d8080c47d4..7c4c205a623 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoSearchParamSynchronizer.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoSearchParamSynchronizer.java
@@ -20,7 +20,9 @@
package ca.uhn.fhir.jpa.dao.index;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndex;
+import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
import ca.uhn.fhir.jpa.util.AddRemoveCount;
import com.google.common.annotations.VisibleForTesting;
@@ -29,10 +31,12 @@ import jakarta.persistence.PersistenceContext;
import jakarta.persistence.PersistenceContextType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
@@ -42,6 +46,9 @@ import java.util.Set;
public class DaoSearchParamSynchronizer {
private static final Logger ourLog = LoggerFactory.getLogger(DaoSearchParamSynchronizer.class);
+ @Autowired
+ private StorageSettings myStorageSettings;
+
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
protected EntityManager myEntityManager;
@@ -68,6 +75,11 @@ public class DaoSearchParamSynchronizer {
return retVal;
}
+ @VisibleForTesting
+ public void setStorageSettings(StorageSettings theStorageSettings) {
+ this.myStorageSettings = theStorageSettings;
+ }
+
@VisibleForTesting
public void setEntityManager(EntityManager theEntityManager) {
myEntityManager = theEntityManager;
@@ -115,6 +127,7 @@ public class DaoSearchParamSynchronizer {
List paramsToRemove = subtract(theExistingParams, newParams);
List paramsToAdd = subtract(newParams, theExistingParams);
tryToReuseIndexEntities(paramsToRemove, paramsToAdd);
+ updateExistingParamsIfRequired(theExistingParams, paramsToAdd, newParams, paramsToRemove);
for (T next : paramsToRemove) {
if (!myEntityManager.contains(next)) {
@@ -134,6 +147,62 @@ public class DaoSearchParamSynchronizer {
theAddRemoveCount.addToRemoveCount(paramsToRemove.size());
}
+ /**
+ *
+ * This method performs an update of Search Parameter's fields in the case of
+ * $reindex or update operation by:
+ * 1. Marking existing entities for updating to apply index storage optimization,
+ * if it is enabled (disabled by default).
+ * 2. Recovering SP_NAME, RES_TYPE values of Search Parameter's fields
+ * for existing entities in case if index storage optimization is disabled (but was enabled previously).
+ *
* This setting has no effect if partitioning is not enabled via {@link #isPartitioningEnabled()}.
*
+ *
+ * If {@link StorageSettings#isIndexStorageOptimized()} is enabled this setting should be set to false.
+ *
*/
public boolean isIncludePartitionInSearchHashes() {
return myIncludePartitionInSearchHashes;
@@ -71,6 +76,9 @@ public class PartitionSettings {
*
* This setting has no effect if partitioning is not enabled via {@link #isPartitioningEnabled()}.
*
+ *
+ * If {@link StorageSettings#isIndexStorageOptimized()} is enabled this setting should be set to false.
+ *
*/
public PartitionSettings setIncludePartitionInSearchHashes(boolean theIncludePartitionInSearchHashes) {
myIncludePartitionInSearchHashes = theIncludePartitionInSearchHashes;
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParam.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParam.java
index 352b90cbbc7..519abb6936e 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParam.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParam.java
@@ -22,15 +22,9 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.util.SearchParamHash;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.api.Constants;
-import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
-import ca.uhn.fhir.util.UrlUtil;
-import com.google.common.base.Charsets;
-import com.google.common.hash.HashCode;
-import com.google.common.hash.HashFunction;
-import com.google.common.hash.Hasher;
-import com.google.common.hash.Hashing;
import jakarta.persistence.Column;
import jakarta.persistence.MappedSuperclass;
import jakarta.persistence.Temporal;
@@ -46,16 +40,6 @@ import java.util.List;
@MappedSuperclass
public abstract class BaseResourceIndexedSearchParam extends BaseResourceIndex {
static final int MAX_SP_NAME = 100;
- /**
- * Don't change this without careful consideration. You will break existing hashes!
- */
- private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128(0);
-
- /**
- * Don't make this public 'cause nobody better be able to modify it!
- */
- private static final byte[] DELIMITER_BYTES = "|".getBytes(Charsets.UTF_8);
-
private static final long serialVersionUID = 1L;
@GenericField
@@ -63,18 +47,26 @@ public abstract class BaseResourceIndexedSearchParam extends BaseResourceIndex {
private boolean myMissing = false;
@FullTextField
- @Column(name = "SP_NAME", length = MAX_SP_NAME, nullable = false)
+ @Column(name = "SP_NAME", length = MAX_SP_NAME)
private String myParamName;
@Column(name = "RES_ID", insertable = false, updatable = false, nullable = false)
private Long myResourcePid;
@FullTextField
- @Column(name = "RES_TYPE", updatable = false, nullable = false, length = Constants.MAX_RESOURCE_NAME_LENGTH)
+ @Column(name = "RES_TYPE", length = Constants.MAX_RESOURCE_NAME_LENGTH)
private String myResourceType;
+ /**
+ * Composite of resourceType, paramName, and partition info if configured.
+ * Combined with the various date fields for a query.
+ * Nullable to allow optimized storage.
+ */
+ @Column(name = "HASH_IDENTITY", nullable = true)
+ protected Long myHashIdentity;
+
@GenericField
- @Column(name = "SP_UPDATED", nullable = true) // TODO: make this false after HAPI 2.3
+ @Column(name = "SP_UPDATED")
@Temporal(TemporalType.TIMESTAMP)
private Date myUpdated;
@@ -98,6 +90,28 @@ public abstract class BaseResourceIndexedSearchParam extends BaseResourceIndex {
}
}
+ /**
+ * Restore SP_NAME without clearing hashes
+ */
+ public void restoreParamName(String theParamName) {
+ if (myParamName == null) {
+ myParamName = theParamName;
+ }
+ }
+
+ /**
+ * Set SP_NAME, RES_TYPE, SP_UPDATED to null without clearing hashes
+ */
+ public void optimizeIndexStorage() {
+ myParamName = null;
+ myResourceType = null;
+ myUpdated = null;
+ }
+
+ public boolean isIndexStorageOptimized() {
+ return myParamName == null || myResourceType == null || myUpdated == null;
+ }
+
// MB pushed these down to the individual SP classes so we could name the FK in the join annotation
/**
* Get the Resource this SP indexes
@@ -111,6 +125,7 @@ public abstract class BaseResourceIndexedSearchParam extends BaseResourceIndex {
BaseResourceIndexedSearchParam source = (BaseResourceIndexedSearchParam) theSource;
myMissing = source.myMissing;
myParamName = source.myParamName;
+ myResourceType = source.myResourceType;
myUpdated = source.myUpdated;
myStorageSettings = source.myStorageSettings;
myPartitionSettings = source.myPartitionSettings;
@@ -129,6 +144,14 @@ public abstract class BaseResourceIndexedSearchParam extends BaseResourceIndex {
myResourceType = theResourceType;
}
+ public void setHashIdentity(Long theHashIdentity) {
+ myHashIdentity = theHashIdentity;
+ }
+
+ public Long getHashIdentity() {
+ return myHashIdentity;
+ }
+
public Date getUpdated() {
return myUpdated;
}
@@ -184,7 +207,8 @@ public abstract class BaseResourceIndexedSearchParam extends BaseResourceIndex {
RequestPartitionId theRequestPartitionId,
String theResourceType,
String theParamName) {
- return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName);
+ return SearchParamHash.hashSearchParam(
+ thePartitionSettings, theRequestPartitionId, theResourceType, theParamName);
}
public static long calculateHashIdentity(
@@ -200,42 +224,6 @@ public abstract class BaseResourceIndexedSearchParam extends BaseResourceIndex {
values[i + 2] = theAdditionalValues.get(i);
}
- return hash(thePartitionSettings, theRequestPartitionId, values);
- }
-
- /**
- * Applies a fast and consistent hashing algorithm to a set of strings
- */
- static long hash(
- PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String... theValues) {
- Hasher hasher = HASH_FUNCTION.newHasher();
-
- if (thePartitionSettings.isPartitioningEnabled()
- && thePartitionSettings.isIncludePartitionInSearchHashes()
- && theRequestPartitionId != null) {
- if (theRequestPartitionId.getPartitionIds().size() > 1) {
- throw new InternalErrorException(Msg.code(1527)
- + "Can not search multiple partitions when partitions are included in search hashes");
- }
- Integer partitionId = theRequestPartitionId.getFirstPartitionIdOrNull();
- if (partitionId != null) {
- hasher.putInt(partitionId);
- }
- }
-
- for (String next : theValues) {
- if (next == null) {
- hasher.putByte((byte) 0);
- } else {
- next = UrlUtil.escapeUrlParam(next);
- byte[] bytes = next.getBytes(Charsets.UTF_8);
- hasher.putBytes(bytes);
- }
- hasher.putBytes(DELIMITER_BYTES);
- }
-
- HashCode hashCode = hasher.hash();
- long retVal = hashCode.asLong();
- return retVal;
+ return SearchParamHash.hashSearchParam(thePartitionSettings, theRequestPartitionId, values);
}
}
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParamQuantity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParamQuantity.java
index 23f6f13019f..c8a2eb3aa4a 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParamQuantity.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParamQuantity.java
@@ -26,6 +26,8 @@ import jakarta.persistence.MappedSuperclass;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;
+import static ca.uhn.fhir.jpa.model.util.SearchParamHash.hashSearchParam;
+
@MappedSuperclass
public abstract class BaseResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearchParam {
@@ -51,11 +53,6 @@ public abstract class BaseResourceIndexedSearchParamQuantity extends BaseResourc
*/
@Column(name = "HASH_IDENTITY_SYS_UNITS", nullable = true)
private Long myHashIdentitySystemAndUnits;
- /**
- * @since 3.5.0 - At some point this should be made not-null
- */
- @Column(name = "HASH_IDENTITY", nullable = true)
- private Long myHashIdentity;
/**
* Constructor
@@ -88,14 +85,6 @@ public abstract class BaseResourceIndexedSearchParamQuantity extends BaseResourc
getPartitionSettings(), getPartitionId(), resourceType, paramName, system, units));
}
- public Long getHashIdentity() {
- return myHashIdentity;
- }
-
- public void setHashIdentity(Long theHashIdentity) {
- myHashIdentity = theHashIdentity;
- }
-
public Long getHashIdentityAndUnits() {
return myHashIdentityAndUnits;
}
@@ -131,8 +120,6 @@ public abstract class BaseResourceIndexedSearchParamQuantity extends BaseResourc
@Override
public int hashCode() {
HashCodeBuilder b = new HashCodeBuilder();
- b.append(getResourceType());
- b.append(getParamName());
b.append(getHashIdentity());
b.append(getHashIdentityAndUnits());
b.append(getHashIdentitySystemAndUnits());
@@ -158,7 +145,8 @@ public abstract class BaseResourceIndexedSearchParamQuantity extends BaseResourc
String theParamName,
String theSystem,
String theUnits) {
- return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, theSystem, theUnits);
+ return hashSearchParam(
+ thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, theSystem, theUnits);
}
public static long calculateHashUnits(
@@ -177,6 +165,6 @@ public abstract class BaseResourceIndexedSearchParamQuantity extends BaseResourc
String theResourceType,
String theParamName,
String theUnits) {
- return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, theUnits);
+ return hashSearchParam(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, theUnits);
}
}
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedComboTokenNonUnique.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedComboTokenNonUnique.java
index 9e6ab3315c9..acc5dc49d4a 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedComboTokenNonUnique.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedComboTokenNonUnique.java
@@ -39,7 +39,7 @@ import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.hl7.fhir.instance.model.api.IIdType;
-import static ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam.hash;
+import static ca.uhn.fhir.jpa.model.util.SearchParamHash.hashSearchParam;
@Entity
@Table(
@@ -206,12 +206,12 @@ public class ResourceIndexedComboTokenNonUnique extends BaseResourceIndex
public static long calculateHashComplete(
PartitionSettings partitionSettings, PartitionablePartitionId thePartitionId, String queryString) {
RequestPartitionId requestPartitionId = PartitionablePartitionId.toRequestPartitionId(thePartitionId);
- return hash(partitionSettings, requestPartitionId, queryString);
+ return hashSearchParam(partitionSettings, requestPartitionId, queryString);
}
public static long calculateHashComplete(
PartitionSettings partitionSettings, RequestPartitionId partitionId, String queryString) {
- return hash(partitionSettings, partitionId, queryString);
+ return hashSearchParam(partitionSettings, partitionId, queryString);
}
/**
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoords.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoords.java
index a66e5f6f564..bf6e7baaed3 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoords.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoords.java
@@ -20,11 +20,13 @@
package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.listener.IndexStorageOptimizationListener;
import ca.uhn.fhir.model.api.IQueryParameterType;
import jakarta.annotation.Nullable;
import jakarta.persistence.Column;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Entity;
+import jakarta.persistence.EntityListeners;
import jakarta.persistence.FetchType;
import jakarta.persistence.ForeignKey;
import jakarta.persistence.GeneratedValue;
@@ -41,6 +43,7 @@ import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
@Embeddable
+@EntityListeners(IndexStorageOptimizationListener.class)
@Entity
@Table(
name = "HFJ_SPIDX_COORDS",
@@ -68,11 +71,6 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_COORDS")
@Column(name = "SP_ID")
private Long myId;
- /**
- * @since 3.5.0 - At some point this should be made not-null
- */
- @Column(name = "HASH_IDENTITY", nullable = true)
- private Long myHashIdentity;
@ManyToOne(
optional = false,
@@ -130,8 +128,7 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
}
ResourceIndexedSearchParamCoords obj = (ResourceIndexedSearchParamCoords) theObj;
EqualsBuilder b = new EqualsBuilder();
- b.append(getResourceType(), obj.getResourceType());
- b.append(getParamName(), obj.getParamName());
+ b.append(getHashIdentity(), obj.getHashIdentity());
b.append(getLatitude(), obj.getLatitude());
b.append(getLongitude(), obj.getLongitude());
b.append(isMissing(), obj.isMissing());
@@ -147,10 +144,6 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
myHashIdentity = source.myHashIdentity;
}
- public void setHashIdentity(Long theHashIdentity) {
- myHashIdentity = theHashIdentity;
- }
-
@Override
public Long getId() {
return myId;
@@ -184,10 +177,10 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
@Override
public int hashCode() {
HashCodeBuilder b = new HashCodeBuilder();
- b.append(getParamName());
- b.append(getResourceType());
+ b.append(getHashIdentity());
b.append(getLatitude());
b.append(getLongitude());
+ b.append(isMissing());
return b.toHashCode();
}
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamDate.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamDate.java
index 401aa7dd66f..45259d4a9f5 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamDate.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamDate.java
@@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.listener.IndexStorageOptimizationListener;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.model.primitive.InstantDt;
@@ -29,6 +30,7 @@ import ca.uhn.fhir.util.DateUtils;
import jakarta.persistence.Column;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Entity;
+import jakarta.persistence.EntityListeners;
import jakarta.persistence.FetchType;
import jakarta.persistence.ForeignKey;
import jakarta.persistence.GeneratedValue;
@@ -55,6 +57,7 @@ import java.text.SimpleDateFormat;
import java.util.Date;
@Embeddable
+@EntityListeners(IndexStorageOptimizationListener.class)
@Entity
@Table(
name = "HFJ_SPIDX_DATE",
@@ -109,14 +112,6 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
@Column(name = "SP_ID")
private Long myId;
- /**
- * Composite of resourceType, paramName, and partition info if configured.
- * Combined with the various date fields for a query.
- * @since 3.5.0 - At some point this should be made not-null
- */
- @Column(name = "HASH_IDENTITY", nullable = true)
- private Long myHashIdentity;
-
@ManyToOne(
optional = false,
fetch = FetchType.LAZY,
@@ -264,8 +259,7 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
}
ResourceIndexedSearchParamDate obj = (ResourceIndexedSearchParamDate) theObj;
EqualsBuilder b = new EqualsBuilder();
- b.append(getResourceType(), obj.getResourceType());
- b.append(getParamName(), obj.getParamName());
+ b.append(getHashIdentity(), obj.getHashIdentity());
b.append(getTimeFromDate(getValueHigh()), getTimeFromDate(obj.getValueHigh()));
b.append(getTimeFromDate(getValueLow()), getTimeFromDate(obj.getValueLow()));
b.append(getValueLowDateOrdinal(), obj.getValueLowDateOrdinal());
@@ -274,10 +268,6 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
return b.isEquals();
}
- public void setHashIdentity(Long theHashIdentity) {
- myHashIdentity = theHashIdentity;
- }
-
@Override
public Long getId() {
return myId;
@@ -316,10 +306,12 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
@Override
public int hashCode() {
HashCodeBuilder b = new HashCodeBuilder();
- b.append(getResourceType());
- b.append(getParamName());
+ b.append(getHashIdentity());
b.append(getTimeFromDate(getValueHigh()));
b.append(getTimeFromDate(getValueLow()));
+ b.append(getValueHighDateOrdinal());
+ b.append(getValueLowDateOrdinal());
+ b.append(isMissing());
return b.toHashCode();
}
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamNumber.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamNumber.java
index a1527437dc5..902e3ac6c0c 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamNumber.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamNumber.java
@@ -20,11 +20,13 @@
package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.listener.IndexStorageOptimizationListener;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.param.NumberParam;
import jakarta.persistence.Column;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Entity;
+import jakarta.persistence.EntityListeners;
import jakarta.persistence.FetchType;
import jakarta.persistence.ForeignKey;
import jakarta.persistence.GeneratedValue;
@@ -47,6 +49,7 @@ import java.math.BigDecimal;
import java.util.Objects;
@Embeddable
+@EntityListeners(IndexStorageOptimizationListener.class)
@Entity
@Table(
name = "HFJ_SPIDX_NUMBER",
@@ -69,11 +72,6 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_NUMBER")
@Column(name = "SP_ID")
private Long myId;
- /**
- * @since 3.5.0 - At some point this should be made not-null
- */
- @Column(name = "HASH_IDENTITY", nullable = true)
- private Long myHashIdentity;
@ManyToOne(
optional = false,
@@ -120,10 +118,6 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
setHashIdentity(calculateHashIdentity(getPartitionSettings(), getPartitionId(), resourceType, paramName));
}
- public Long getHashIdentity() {
- return myHashIdentity;
- }
-
@Override
public boolean equals(Object theObj) {
if (this == theObj) {
@@ -137,8 +131,6 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
}
ResourceIndexedSearchParamNumber obj = (ResourceIndexedSearchParamNumber) theObj;
EqualsBuilder b = new EqualsBuilder();
- b.append(getResourceType(), obj.getResourceType());
- b.append(getParamName(), obj.getParamName());
b.append(getHashIdentity(), obj.getHashIdentity());
b.append(normalizeForEqualityComparison(getValue()), normalizeForEqualityComparison(obj.getValue()));
b.append(isMissing(), obj.isMissing());
@@ -152,10 +144,6 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
return theValue.doubleValue();
}
- public void setHashIdentity(Long theHashIdentity) {
- myHashIdentity = theHashIdentity;
- }
-
@Override
public Long getId() {
return myId;
@@ -177,8 +165,6 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
@Override
public int hashCode() {
HashCodeBuilder b = new HashCodeBuilder();
- b.append(getResourceType());
- b.append(getParamName());
b.append(getHashIdentity());
b.append(normalizeForEqualityComparison(getValue()));
b.append(isMissing());
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantity.java
index 6b38f3b52e1..0f1b2bd5568 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantity.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantity.java
@@ -20,11 +20,13 @@
package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.listener.IndexStorageOptimizationListener;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.param.QuantityParam;
import jakarta.persistence.Column;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Entity;
+import jakarta.persistence.EntityListeners;
import jakarta.persistence.FetchType;
import jakarta.persistence.ForeignKey;
import jakarta.persistence.GeneratedValue;
@@ -36,6 +38,7 @@ import jakarta.persistence.ManyToOne;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.ScaledNumberField;
@@ -48,6 +51,7 @@ import static org.apache.commons.lang3.StringUtils.isBlank;
// @formatter:off
@Embeddable
+@EntityListeners(IndexStorageOptimizationListener.class)
@Entity
@Table(
name = "HFJ_SPIDX_QUANTITY",
@@ -173,8 +177,6 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc
}
ResourceIndexedSearchParamQuantity obj = (ResourceIndexedSearchParamQuantity) theObj;
EqualsBuilder b = new EqualsBuilder();
- b.append(getResourceType(), obj.getResourceType());
- b.append(getParamName(), obj.getParamName());
b.append(getHashIdentity(), obj.getHashIdentity());
b.append(getHashIdentityAndUnits(), obj.getHashIdentityAndUnits());
b.append(getHashIdentitySystemAndUnits(), obj.getHashIdentitySystemAndUnits());
@@ -183,6 +185,17 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc
return b.isEquals();
}
+ @Override
+ public int hashCode() {
+ HashCodeBuilder b = new HashCodeBuilder();
+ b.append(getHashIdentity());
+ b.append(getHashIdentityAndUnits());
+ b.append(getHashIdentitySystemAndUnits());
+ b.append(isMissing());
+ b.append(getValue());
+ return b.toHashCode();
+ }
+
@Override
public boolean matches(IQueryParameterType theParam) {
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantityNormalized.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantityNormalized.java
index 4bf738b747a..b235a86bc09 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantityNormalized.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantityNormalized.java
@@ -20,12 +20,14 @@
package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.listener.IndexStorageOptimizationListener;
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.param.QuantityParam;
import jakarta.persistence.Column;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Entity;
+import jakarta.persistence.EntityListeners;
import jakarta.persistence.FetchType;
import jakarta.persistence.ForeignKey;
import jakarta.persistence.GeneratedValue;
@@ -37,6 +39,7 @@ import jakarta.persistence.ManyToOne;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.fhir.ucum.Pair;
@@ -50,6 +53,7 @@ import static org.apache.commons.lang3.StringUtils.isBlank;
// @formatter:off
@Embeddable
+@EntityListeners(IndexStorageOptimizationListener.class)
@Entity
@Table(
name = "HFJ_SPIDX_QUANTITY_NRML",
@@ -189,8 +193,6 @@ public class ResourceIndexedSearchParamQuantityNormalized extends BaseResourceIn
}
ResourceIndexedSearchParamQuantityNormalized obj = (ResourceIndexedSearchParamQuantityNormalized) theObj;
EqualsBuilder b = new EqualsBuilder();
- b.append(getResourceType(), obj.getResourceType());
- b.append(getParamName(), obj.getParamName());
b.append(getHashIdentity(), obj.getHashIdentity());
b.append(getHashIdentityAndUnits(), obj.getHashIdentityAndUnits());
b.append(getHashIdentitySystemAndUnits(), obj.getHashIdentitySystemAndUnits());
@@ -199,6 +201,17 @@ public class ResourceIndexedSearchParamQuantityNormalized extends BaseResourceIn
return b.isEquals();
}
+ @Override
+ public int hashCode() {
+ HashCodeBuilder b = new HashCodeBuilder();
+ b.append(getHashIdentity());
+ b.append(getHashIdentityAndUnits());
+ b.append(getHashIdentitySystemAndUnits());
+ b.append(isMissing());
+ b.append(getValue());
+ return b.toHashCode();
+ }
+
@Override
public boolean matches(IQueryParameterType theParam) {
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamString.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamString.java
index c1e5b1ac19c..5795c589602 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamString.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamString.java
@@ -22,12 +22,14 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.listener.IndexStorageOptimizationListener;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.util.StringUtil;
import jakarta.persistence.Column;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Entity;
+import jakarta.persistence.EntityListeners;
import jakarta.persistence.ForeignKey;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
@@ -42,10 +44,12 @@ import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
+import static ca.uhn.fhir.jpa.model.util.SearchParamHash.hashSearchParam;
import static org.apache.commons.lang3.StringUtils.defaultString;
// @formatter:off
@Embeddable
+@EntityListeners(IndexStorageOptimizationListener.class)
@Entity
@Table(
name = "HFJ_SPIDX_STRING",
@@ -97,11 +101,6 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
*/
@Column(name = "HASH_NORM_PREFIX", nullable = true)
private Long myHashNormalizedPrefix;
- /**
- * @since 3.6.0 - At some point this should be made not-null
- */
- @Column(name = "HASH_IDENTITY", nullable = true)
- private Long myHashIdentity;
/**
* @since 3.4.0 - At some point this should be made not-null
*/
@@ -180,24 +179,15 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
}
ResourceIndexedSearchParamString obj = (ResourceIndexedSearchParamString) theObj;
EqualsBuilder b = new EqualsBuilder();
- b.append(getResourceType(), obj.getResourceType());
- b.append(getParamName(), obj.getParamName());
b.append(getValueExact(), obj.getValueExact());
b.append(getHashIdentity(), obj.getHashIdentity());
b.append(getHashExact(), obj.getHashExact());
b.append(getHashNormalizedPrefix(), obj.getHashNormalizedPrefix());
b.append(getValueNormalized(), obj.getValueNormalized());
+ b.append(isMissing(), obj.isMissing());
return b.isEquals();
}
- private Long getHashIdentity() {
- return myHashIdentity;
- }
-
- public void setHashIdentity(Long theHashIdentity) {
- myHashIdentity = theHashIdentity;
- }
-
public Long getHashExact() {
return myHashExact;
}
@@ -251,13 +241,12 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
@Override
public int hashCode() {
HashCodeBuilder b = new HashCodeBuilder();
- b.append(getResourceType());
- b.append(getParamName());
b.append(getValueExact());
b.append(getHashIdentity());
b.append(getHashExact());
b.append(getHashNormalizedPrefix());
b.append(getValueNormalized());
+ b.append(isMissing());
return b.toHashCode();
}
@@ -306,7 +295,8 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
String theResourceType,
String theParamName,
String theValueExact) {
- return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, theValueExact);
+ return hashSearchParam(
+ thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, theValueExact);
}
public static long calculateHashNormalized(
@@ -345,7 +335,7 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP
}
String value = StringUtil.left(theValueNormalized, hashPrefixLength);
- return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, value);
+ return hashSearchParam(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, value);
}
@Override
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamToken.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamToken.java
index 9066f9f25db..cfe3d886249 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamToken.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamToken.java
@@ -21,12 +21,14 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.listener.IndexStorageOptimizationListener;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.param.TokenParam;
import jakarta.persistence.Column;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Entity;
+import jakarta.persistence.EntityListeners;
import jakarta.persistence.FetchType;
import jakarta.persistence.ForeignKey;
import jakarta.persistence.GeneratedValue;
@@ -46,10 +48,12 @@ import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;
+import static ca.uhn.fhir.jpa.model.util.SearchParamHash.hashSearchParam;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.trim;
@Embeddable
+@EntityListeners(IndexStorageOptimizationListener.class)
@Entity
@Table(
name = "HFJ_SPIDX_TOKEN",
@@ -89,11 +93,6 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_TOKEN")
@Column(name = "SP_ID")
private Long myId;
- /**
- * @since 3.4.0 - At some point this should be made not-null
- */
- @Column(name = "HASH_IDENTITY", nullable = true)
- private Long myHashIdentity;
/**
* @since 3.4.0 - At some point this should be made not-null
*/
@@ -217,9 +216,11 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
}
ResourceIndexedSearchParamToken obj = (ResourceIndexedSearchParamToken) theObj;
EqualsBuilder b = new EqualsBuilder();
+ b.append(getHashIdentity(), obj.getHashIdentity());
b.append(getHashSystem(), obj.getHashSystem());
b.append(getHashValue(), obj.getHashValue());
b.append(getHashSystemAndValue(), obj.getHashSystemAndValue());
+ b.append(isMissing(), obj.isMissing());
return b.isEquals();
}
@@ -231,10 +232,6 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
myHashSystem = theHashSystem;
}
- private void setHashIdentity(Long theHashIdentity) {
- myHashIdentity = theHashIdentity;
- }
-
public Long getHashSystemAndValue() {
return myHashSystemAndValue;
}
@@ -283,11 +280,11 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
@Override
public int hashCode() {
HashCodeBuilder b = new HashCodeBuilder();
- b.append(getResourceType());
+ b.append(getHashIdentity());
b.append(getHashValue());
b.append(getHashSystem());
b.append(getHashSystemAndValue());
-
+ b.append(isMissing());
return b.toHashCode();
}
@@ -362,7 +359,8 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
String theResourceType,
String theParamName,
String theSystem) {
- return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, trim(theSystem));
+ return hashSearchParam(
+ thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, trim(theSystem));
}
public static long calculateHashSystemAndValue(
@@ -384,7 +382,7 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
String theParamName,
String theSystem,
String theValue) {
- return hash(
+ return hashSearchParam(
thePartitionSettings,
theRequestPartitionId,
theResourceType,
@@ -410,7 +408,7 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
String theParamName,
String theValue) {
String value = trim(theValue);
- return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, value);
+ return hashSearchParam(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, value);
}
@Override
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUri.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUri.java
index d16396269b4..02a5f23a16f 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUri.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUri.java
@@ -21,11 +21,13 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.listener.IndexStorageOptimizationListener;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.param.UriParam;
import jakarta.persistence.Column;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Entity;
+import jakarta.persistence.EntityListeners;
import jakarta.persistence.FetchType;
import jakarta.persistence.ForeignKey;
import jakarta.persistence.GeneratedValue;
@@ -42,9 +44,11 @@ import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;
+import static ca.uhn.fhir.jpa.model.util.SearchParamHash.hashSearchParam;
import static org.apache.commons.lang3.StringUtils.defaultString;
@Embeddable
+@EntityListeners(IndexStorageOptimizationListener.class)
@Entity
@Table(
name = "HFJ_SPIDX_URI",
@@ -84,11 +88,6 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara
*/
@Column(name = "HASH_URI", nullable = true)
private Long myHashUri;
- /**
- * @since 3.5.0 - At some point this should be made not-null
- */
- @Column(name = "HASH_IDENTITY", nullable = true)
- private Long myHashIdentity;
@ManyToOne(
optional = false,
@@ -161,22 +160,13 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara
}
ResourceIndexedSearchParamUri obj = (ResourceIndexedSearchParamUri) theObj;
EqualsBuilder b = new EqualsBuilder();
- b.append(getResourceType(), obj.getResourceType());
- b.append(getParamName(), obj.getParamName());
b.append(getUri(), obj.getUri());
b.append(getHashUri(), obj.getHashUri());
b.append(getHashIdentity(), obj.getHashIdentity());
+ b.append(isMissing(), obj.isMissing());
return b.isEquals();
}
- private Long getHashIdentity() {
- return myHashIdentity;
- }
-
- private void setHashIdentity(long theHashIdentity) {
- myHashIdentity = theHashIdentity;
- }
-
public Long getHashUri() {
return myHashUri;
}
@@ -207,11 +197,10 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara
@Override
public int hashCode() {
HashCodeBuilder b = new HashCodeBuilder();
- b.append(getResourceType());
- b.append(getParamName());
b.append(getUri());
b.append(getHashUri());
b.append(getHashIdentity());
+ b.append(isMissing());
return b.toHashCode();
}
@@ -257,7 +246,7 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara
String theResourceType,
String theParamName,
String theUri) {
- return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, theUri);
+ return hashSearchParam(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, theUri);
}
@Override
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/SearchParamPresentEntity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/SearchParamPresentEntity.java
index 9270f6e163c..3f931b56952 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/SearchParamPresentEntity.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/SearchParamPresentEntity.java
@@ -42,6 +42,8 @@ import org.apache.commons.lang3.builder.ToStringStyle;
import java.io.Serializable;
+import static ca.uhn.fhir.jpa.model.util.SearchParamHash.hashSearchParam;
+
@Entity
@Table(
name = "HFJ_RES_PARAM_PRESENT",
@@ -212,7 +214,6 @@ public class SearchParamPresentEntity extends BasePartitionable implements Seria
String theParamName,
Boolean thePresent) {
String string = thePresent != null ? Boolean.toString(thePresent) : Boolean.toString(false);
- return BaseResourceIndexedSearchParam.hash(
- thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, string);
+ return hashSearchParam(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, string);
}
}
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java
index 630b295c946..a6c80cf639b 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java
@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.context.ParserOptions;
import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.util.ISequenceValueMassager;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationSvc;
@@ -134,6 +135,14 @@ public class StorageSettings {
*/
private boolean myValidateResourceStatusForPackageUpload = true;
+ /**
+ * If set to true, the server will not write data to the SP_NAME, RES_TYPE, SP_UPDATED
+ * columns for all HFJ_SPIDX tables.
+ *
+ * @since 7.4.0
+ */
+ private boolean myIndexStorageOptimized = false;
+
/**
* Constructor
*/
@@ -277,6 +286,58 @@ public class StorageSettings {
myIndexMissingFieldsEnabled = theIndexMissingFields;
}
+ /**
+ * If set to true (default is false), the server will not write data
+ * to the SP_NAME, RES_TYPE, SP_UPDATED columns for all HFJ_SPIDX tables.
+ *
+ * This feature may be enabled on servers where HFJ_SPIDX tables are expected
+ * to have a large amount of data (millions of rows) in order to reduce overall storage size.
+ *
+ *
+ * Note that this setting only applies to newly inserted and updated rows in HFJ_SPIDX tables.
+ * In order to apply this optimization setting to existing HFJ_SPIDX index rows,
+ * $reindex operation should be executed at the instance or server level.
+ *
+ *
+ * If this setting is enabled, {@link PartitionSettings#isIncludePartitionInSearchHashes()} should be disabled.
+ *
+ *
+ * If {@link StorageSettings#getIndexMissingFields()} is enabled, the following index may need to be added
+ * into the HFJ_SPIDX tables to improve the search performance: HASH_IDENTITY, SP_MISSING, RES_ID, PARTITION_ID
+ *
+ *
+ * @since 7.4.0
+ */
+ public boolean isIndexStorageOptimized() {
+ return myIndexStorageOptimized;
+ }
+
+ /**
+ * If set to true (default is false), the server will not write data
+ * to the SP_NAME, RES_TYPE, SP_UPDATED columns for all HFJ_SPIDX tables.
+ *
+ * This feature may be enabled on servers where HFJ_SPIDX tables are expected
+ * to have a large amount of data (millions of rows) in order to reduce overall storage size.
+ *
+ *
+ * Note that this setting only applies to newly inserted and updated rows in HFJ_SPIDX tables.
+ * In order to apply this optimization setting to existing HFJ_SPIDX index rows,
+ * $reindex operation should be executed at the instance or server level.
+ *
+ *
+ * If this setting is enabled, {@link PartitionSettings#isIncludePartitionInSearchHashes()} should be set to false.
+ *
+ *
+ * If {@link StorageSettings#getIndexMissingFields()} is enabled, the following index may need to be added
+ * into the HFJ_SPIDX tables to improve the search performance: HASH_IDENTITY, SP_MISSING, RES_ID, PARTITION_ID
+ *
+ *
+ * @since 7.4.0
+ */
+ public void setIndexStorageOptimized(boolean theIndexStorageOptimized) {
+ myIndexStorageOptimized = theIndexStorageOptimized;
+ }
+
/**
* If this is enabled (disabled by default), Mass Ingestion Mode is enabled. In this mode, a number of
* runtime checks are disabled. This mode is designed for rapid backloading of data while the system is not
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/listener/IndexStorageOptimizationListener.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/listener/IndexStorageOptimizationListener.java
new file mode 100644
index 00000000000..1cb857418d1
--- /dev/null
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/listener/IndexStorageOptimizationListener.java
@@ -0,0 +1,99 @@
+/*
+ * #%L
+ * HAPI FHIR JPA Model
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.model.listener;
+
+import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
+import ca.uhn.fhir.jpa.model.entity.StorageSettings;
+import ca.uhn.fhir.jpa.model.search.ISearchParamHashIdentityRegistry;
+import ca.uhn.fhir.rest.server.util.IndexedSearchParam;
+import jakarta.persistence.PostLoad;
+import jakarta.persistence.PostPersist;
+import jakarta.persistence.PostUpdate;
+import jakarta.persistence.PrePersist;
+import jakarta.persistence.PreUpdate;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.ApplicationContext;
+
+import java.util.Optional;
+
+/**
+ * Sets SP_NAME, RES_TYPE, SP_UPDATED column values to null for all HFJ_SPIDX tables
+ * if storage setting {@link ca.uhn.fhir.jpa.model.entity.StorageSettings#isIndexStorageOptimized()} is enabled.
+ *
+ * Using EntityListener to change HFJ_SPIDX column values right before insert/update to database.
+ *
+ *
+ * As SP_NAME, RES_TYPE values could still be used after merge/persist to database, we are restoring
+ * them from HASH_IDENTITY value.
+ *
+ * See {@link ca.uhn.fhir.jpa.model.entity.StorageSettings#setIndexStorageOptimized(boolean)}
+ */
+public class IndexStorageOptimizationListener {
+
+ public IndexStorageOptimizationListener(
+ @Autowired StorageSettings theStorageSettings, @Autowired ApplicationContext theApplicationContext) {
+ this.myStorageSettings = theStorageSettings;
+ this.myApplicationContext = theApplicationContext;
+ }
+
+ private final StorageSettings myStorageSettings;
+ private final ApplicationContext myApplicationContext;
+
+ @PrePersist
+ @PreUpdate
+ public void optimizeSearchParams(Object theEntity) {
+ if (myStorageSettings.isIndexStorageOptimized() && theEntity instanceof BaseResourceIndexedSearchParam) {
+ ((BaseResourceIndexedSearchParam) theEntity).optimizeIndexStorage();
+ }
+ }
+
+ @PostLoad
+ @PostPersist
+ @PostUpdate
+ public void restoreSearchParams(Object theEntity) {
+ if (myStorageSettings.isIndexStorageOptimized() && theEntity instanceof BaseResourceIndexedSearchParam) {
+ restoreSearchParams((BaseResourceIndexedSearchParam) theEntity);
+ }
+ }
+
+ /**
+ * As SP_NAME, RES_TYPE values could still be used after merge/persist to database (mostly by tests),
+ * we are restoring them from HASH_IDENTITY value.
+ * Note that SP_NAME, RES_TYPE values are not recovered if
+ * {@link ca.uhn.fhir.jpa.model.entity.StorageSettings#isIndexOnContainedResources()} or
+ * {@link ca.uhn.fhir.jpa.model.entity.StorageSettings#isIndexOnContainedResourcesRecursively()}
+ * settings are enabled.
+ */
+ private void restoreSearchParams(BaseResourceIndexedSearchParam theResourceIndexedSearchParam) {
+ // getting ISearchParamHashIdentityRegistry from the App Context as it is initialized after EntityListeners
+ ISearchParamHashIdentityRegistry searchParamRegistry =
+ myApplicationContext.getBean(ISearchParamHashIdentityRegistry.class);
+ Optional<IndexedSearchParam> indexedSearchParamOptional =
+ searchParamRegistry.getIndexedSearchParamByHashIdentity(
+ theResourceIndexedSearchParam.getHashIdentity());
+
+ if (indexedSearchParamOptional.isPresent()) {
+ theResourceIndexedSearchParam.setResourceType(
+ indexedSearchParamOptional.get().getResourceType());
+ theResourceIndexedSearchParam.restoreParamName(
+ indexedSearchParamOptional.get().getParameterName());
+ }
+ }
+}
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/ISearchParamHashIdentityRegistry.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/ISearchParamHashIdentityRegistry.java
new file mode 100644
index 00000000000..343ca0d0c08
--- /dev/null
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/ISearchParamHashIdentityRegistry.java
@@ -0,0 +1,9 @@
+package ca.uhn.fhir.jpa.model.search;
+
+import ca.uhn.fhir.rest.server.util.IndexedSearchParam;
+
+import java.util.Optional;
+
+public interface ISearchParamHashIdentityRegistry {
+ Optional<IndexedSearchParam> getIndexedSearchParamByHashIdentity(Long theHashIdentity);
+}
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/SearchParamHash.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/SearchParamHash.java
new file mode 100644
index 00000000000..5ca532e1140
--- /dev/null
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/SearchParamHash.java
@@ -0,0 +1,85 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Model
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.model.util;
+
+import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import ca.uhn.fhir.util.UrlUtil;
+import com.google.common.base.Charsets;
+import com.google.common.hash.HashCode;
+import com.google.common.hash.HashFunction;
+import com.google.common.hash.Hasher;
+import com.google.common.hash.Hashing;
+
+/**
+ * Utility class for calculating hashes of SearchParam entity fields.
+ */
+public class SearchParamHash {
+
+ /**
+ * Don't change this without careful consideration. You will break existing hashes!
+ */
+ private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128(0);
+
+ /**
+ * Don't make this public 'cause nobody better be able to modify it!
+ */
+ private static final byte[] DELIMITER_BYTES = "|".getBytes(Charsets.UTF_8);
+
+ private SearchParamHash() {}
+
+ /**
+ * Applies a fast and consistent hashing algorithm to a set of strings
+ */
+ public static long hashSearchParam(
+ PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String... theValues) {
+ Hasher hasher = HASH_FUNCTION.newHasher();
+
+ if (thePartitionSettings.isPartitioningEnabled()
+ && thePartitionSettings.isIncludePartitionInSearchHashes()
+ && theRequestPartitionId != null) {
+ if (theRequestPartitionId.getPartitionIds().size() > 1) {
+ throw new InternalErrorException(Msg.code(1527)
+ + "Can not search multiple partitions when partitions are included in search hashes");
+ }
+ Integer partitionId = theRequestPartitionId.getFirstPartitionIdOrNull();
+ if (partitionId != null) {
+ hasher.putInt(partitionId);
+ }
+ }
+
+ for (String next : theValues) {
+ if (next == null) {
+ hasher.putByte((byte) 0);
+ } else {
+ next = UrlUtil.escapeUrlParam(next);
+ byte[] bytes = next.getBytes(Charsets.UTF_8);
+ hasher.putBytes(bytes);
+ }
+ hasher.putBytes(DELIMITER_BYTES);
+ }
+
+ HashCode hashCode = hasher.hash();
+ long retVal = hashCode.asLong();
+ return retVal;
+ }
+}
diff --git a/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoordsTest.java b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoordsTest.java
index 669f828d9bb..cf1beb1a9a7 100644
--- a/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoordsTest.java
+++ b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoordsTest.java
@@ -2,15 +2,16 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
-import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
public class ResourceIndexedSearchParamCoordsTest {
@Test
- public void testEquals() {
+ public void testEqualsAndHashCode_withSameParams_equalsIsTrueAndHashCodeIsSame() {
ResourceIndexedSearchParamCoords val1 = new ResourceIndexedSearchParamCoords()
.setLatitude(100)
.setLongitude(10);
@@ -21,8 +22,55 @@ public class ResourceIndexedSearchParamCoordsTest {
.setLongitude(10);
val2.setPartitionSettings(new PartitionSettings());
val2.calculateHashes();
- assertNotNull(val1);
- assertEquals(val1, val2);
- assertThat("").isNotEqualTo(val1);
+ validateEquals(val2, val1);
+ }
+
+ private void validateEquals(ResourceIndexedSearchParamCoords theParam1, ResourceIndexedSearchParamCoords theParam2) {
+ assertEquals(theParam2, theParam1);
+ assertEquals(theParam1, theParam2);
+ assertEquals(theParam1.hashCode(), theParam2.hashCode());
+ }
+
+ @Test
+ public void testEqualsAndHashCode_withOptimizedSearchParam_equalsIsTrueAndHashCodeIsSame() {
+ ResourceIndexedSearchParamCoords param = new ResourceIndexedSearchParamCoords(
+ new PartitionSettings(), "Patient", "param", 100, 10);
+ ResourceIndexedSearchParamCoords param2 = new ResourceIndexedSearchParamCoords(
+ new PartitionSettings(), "Patient", "param", 100, 10);
+
+ param2.optimizeIndexStorage();
+
+ validateEquals(param, param2);
+ }
+
+ @ParameterizedTest
+ @CsvSource({
+ "Patient, param, 100, 100, false, Observation, param, 100, 100, false, ResourceType is different",
+ "Patient, param, 100, 100, false, Patient, name, 100, 100, false, ParamName is different",
+ "Patient, param, 10, 100, false, Patient, param, 100, 100, false, Latitude is different",
+ "Patient, param, 100, 10, false, Patient, param, 100, 100, false, Longitude is different",
+ "Patient, param, 100, 100, true, Patient, param, 100, 100, false, Missing is different",
+ })
+ public void testEqualsAndHashCode_withDifferentParams_equalsIsFalseAndHashCodeIsDifferent(String theFirstResourceType,
+ String theFirstParamName,
+ double theFirstLatitude,
+ double theFirstLongitude,
+ boolean theFirstMissing,
+ String theSecondResourceType,
+ String theSecondParamName,
+ double theSecondLatitude,
+ double theSecondLongitude,
+ boolean theSecondMissing,
+ String theMessage) {
+ ResourceIndexedSearchParamCoords param = new ResourceIndexedSearchParamCoords(
+ new PartitionSettings(), theFirstResourceType, theFirstParamName, theFirstLatitude, theFirstLongitude);
+ param.setMissing(theFirstMissing);
+ ResourceIndexedSearchParamCoords param2 = new ResourceIndexedSearchParamCoords(
+ new PartitionSettings(), theSecondResourceType, theSecondParamName, theSecondLatitude, theSecondLongitude);
+ param2.setMissing(theSecondMissing);
+
+ assertNotEquals(param, param2, theMessage);
+ assertNotEquals(param2, param, theMessage);
+ assertNotEquals(param.hashCode(), param2.hashCode(), theMessage);
}
}
diff --git a/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamDateTest.java b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamDateTest.java
index c9137af9714..dfefe9a77ca 100644
--- a/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamDateTest.java
+++ b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamDateTest.java
@@ -3,16 +3,17 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
import java.sql.Timestamp;
+import java.time.Instant;
import java.util.Calendar;
import java.util.Date;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertFalse;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
public class ResourceIndexedSearchParamDateTest {
@@ -43,9 +44,7 @@ public class ResourceIndexedSearchParamDateTest {
ResourceIndexedSearchParamDate param = new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "SomeResource", null, null, null, null, "SomeValue");
ResourceIndexedSearchParamDate param2 = new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "SomeResource", null, null, null, null, "SomeValue");
- assertTrue(param.equals(param2));
- assertTrue(param2.equals(param));
- assertEquals(param.hashCode(), param2.hashCode());
+ validateEquals(param, param2);
}
@Test
@@ -53,9 +52,7 @@ public class ResourceIndexedSearchParamDateTest {
ResourceIndexedSearchParamDate param = new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "SomeResource", date1A, null, date2A, null, "SomeValue");
ResourceIndexedSearchParamDate param2 = new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "SomeResource", date1B, null, date2B, null, "SomeValue");
- assertTrue(param.equals(param2));
- assertTrue(param2.equals(param));
- assertEquals(param.hashCode(), param2.hashCode());
+ validateEquals(param, param2);
}
@Test
@@ -63,9 +60,7 @@ public class ResourceIndexedSearchParamDateTest {
ResourceIndexedSearchParamDate param = new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "SomeResource", timestamp1A, null, timestamp2A, null, "SomeValue");
ResourceIndexedSearchParamDate param2 = new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "SomeResource", timestamp1B, null, timestamp2B, null, "SomeValue");
- assertTrue(param.equals(param2));
- assertTrue(param2.equals(param));
- assertEquals(param.hashCode(), param2.hashCode());
+ validateEquals(param, param2);
}
// Scenario that occurs when updating a resource with a date search parameter. One date will be a java.util.Date, the
@@ -75,9 +70,23 @@ public class ResourceIndexedSearchParamDateTest {
ResourceIndexedSearchParamDate param = new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "SomeResource", date1A, null, date2A, null, "SomeValue");
ResourceIndexedSearchParamDate param2 = new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "SomeResource", timestamp1A, null, timestamp2A, null, "SomeValue");
- assertTrue(param.equals(param2));
- assertTrue(param2.equals(param));
- assertEquals(param.hashCode(), param2.hashCode());
+ validateEquals(param, param2);
+ }
+
+ @Test
+ public void equalsIsTrueForOptimizedSearchParam() {
+ ResourceIndexedSearchParamDate param = new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "SomeResource", date1A, null, date2A, null, "SomeValue");
+ ResourceIndexedSearchParamDate param2 = new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "SomeResource", date1A, null, date2A, null, "SomeValue");
+
+ param2.optimizeIndexStorage();
+
+ validateEquals(param, param2);
+ }
+
+ private void validateEquals(ResourceIndexedSearchParamDate theParam, ResourceIndexedSearchParamDate theParam2) {
+ assertEquals(theParam, theParam2);
+ assertEquals(theParam2, theParam);
+ assertEquals(theParam.hashCode(), theParam2.hashCode());
}
@Test
@@ -85,9 +94,7 @@ public class ResourceIndexedSearchParamDateTest {
ResourceIndexedSearchParamDate param = new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "SomeResource", date1A, null, date2A, null, "SomeValue");
ResourceIndexedSearchParamDate param2 = new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "SomeResource", date2A, null, date1A, null, "SomeValue");
- assertFalse(param.equals(param2));
- assertFalse(param2.equals(param));
- assertThat(param2.hashCode()).isNotEqualTo(param.hashCode());
+ validateNotEquals(param, param2);
}
@Test
@@ -95,9 +102,7 @@ public class ResourceIndexedSearchParamDateTest {
ResourceIndexedSearchParamDate param = new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "SomeResource", date1A, null, date2A, null, "SomeValue");
ResourceIndexedSearchParamDate param2 = new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "SomeResource", null, null, null, null, "SomeValue");
- assertFalse(param.equals(param2));
- assertFalse(param2.equals(param));
- assertThat(param2.hashCode()).isNotEqualTo(param.hashCode());
+ validateNotEquals(param, param2);
}
@Test
@@ -105,9 +110,7 @@ public class ResourceIndexedSearchParamDateTest {
ResourceIndexedSearchParamDate param = new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "SomeResource", timestamp1A, null, timestamp2A, null, "SomeValue");
ResourceIndexedSearchParamDate param2 = new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "SomeResource", timestamp2A, null, timestamp1A, null, "SomeValue");
- assertFalse(param.equals(param2));
- assertFalse(param2.equals(param));
- assertThat(param2.hashCode()).isNotEqualTo(param.hashCode());
+ validateNotEquals(param, param2);
}
@Test
@@ -115,14 +118,18 @@ public class ResourceIndexedSearchParamDateTest {
ResourceIndexedSearchParamDate param = new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "SomeResource", date1A, null, date2A, null, "SomeValue");
ResourceIndexedSearchParamDate param2 = new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "SomeResource", timestamp2A, null, timestamp1A, null, "SomeValue");
- assertFalse(param.equals(param2));
- assertFalse(param2.equals(param));
- assertThat(param2.hashCode()).isNotEqualTo(param.hashCode());
+ validateNotEquals(param, param2);
+ }
+
+ private void validateNotEquals(ResourceIndexedSearchParamDate theParam, ResourceIndexedSearchParamDate theParam2) {
+ assertNotEquals(theParam, theParam2);
+ assertNotEquals(theParam2, theParam);
+ assertThat(theParam2.hashCode()).isNotEqualTo(theParam.hashCode());
}
@Test
- public void testEquals() {
+ public void testEqualsAndHashCode_withSameParams_equalsIsTrueAndHashCodeIsSame() {
ResourceIndexedSearchParamDate val1 = new ResourceIndexedSearchParamDate()
.setValueHigh(new Date(100000000L))
.setValueLow(new Date(111111111L));
@@ -133,8 +140,47 @@ public class ResourceIndexedSearchParamDateTest {
.setValueLow(new Date(111111111L));
val2.setPartitionSettings(new PartitionSettings());
val2.calculateHashes();
- assertNotNull(val1);
- assertEquals(val1, val2);
- assertThat("").isNotEqualTo(val1);
+ validateEquals(val1, val2);
+ }
+
+ @ParameterizedTest
+ @CsvSource({
+ "Patient, param, 2018-04-25T14:05:15.953Z, 2019-04-25T14:05:15.953Z, false, " +
+ "Observation, param, 2018-04-25T14:05:15.953Z, 2019-04-25T14:05:15.953Z, false, ResourceType is different",
+ "Patient, param, 2018-04-25T14:05:15.953Z, 2019-04-25T14:05:15.953Z, false, " +
+ "Patient, name, 2018-04-25T14:05:15.953Z, 2019-04-25T14:05:15.953Z, false, ParamName is different",
+ "Patient, param, 2017-04-25T14:05:15.953Z, 2019-04-25T14:05:15.953Z, false, " +
+ "Patient, param, 2018-04-25T14:05:15.953Z, 2019-04-25T14:05:15.953Z, false, LowDate is different",
+ "Patient, param, 2018-04-25T14:05:15.953Z, 2019-04-25T14:05:15.953Z, false, " +
+ "Patient, param, 2018-04-25T14:05:15.953Z, 2020-04-25T14:05:15.953Z, false, HighDate is different",
+ "Patient, param, 2018-04-25T14:05:15.953Z, 2019-04-25T14:05:15.953Z, true, " +
+ "Patient, param, 2018-04-25T14:05:15.953Z, 2019-04-25T14:05:15.953Z, false, Missing is different",
+ })
+ public void testEqualsAndHashCode_withDifferentParams_equalsIsFalseAndHashCodeIsDifferent(String theFirstResourceType,
+ String theFirstParamName,
+ String theFirstLowDate,
+ String theFirstHighDate,
+ boolean theFirstMissing,
+ String theSecondResourceType,
+ String theSecondParamName,
+ String theSecondLowDate,
+ String theSecondHighDate,
+ boolean theSecondMissing,
+ String theMessage) {
+ Date firstLowDate = Date.from(Instant.parse(theFirstLowDate));
+ Date firstHighDate = Date.from(Instant.parse(theFirstHighDate));
+ ResourceIndexedSearchParamDate param = new ResourceIndexedSearchParamDate(new PartitionSettings(),
+ theFirstResourceType, theFirstParamName, firstLowDate, theFirstLowDate, firstHighDate, theFirstHighDate, null);
+ param.setMissing(theFirstMissing);
+
+ Date secondLowDate = Date.from(Instant.parse(theSecondLowDate));
+ Date secondHighDate = Date.from(Instant.parse(theSecondHighDate));
+ ResourceIndexedSearchParamDate param2 = new ResourceIndexedSearchParamDate(new PartitionSettings(),
+ theSecondResourceType, theSecondParamName, secondLowDate, theSecondLowDate, secondHighDate, theSecondHighDate, null);
+ param2.setMissing(theSecondMissing);
+
+ assertNotEquals(param, param2, theMessage);
+ assertNotEquals(param2, param, theMessage);
+ assertNotEquals(param.hashCode(), param2.hashCode(), theMessage);
}
}
diff --git a/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamNumberTest.java b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamNumberTest.java
index 7a73c54820c..feb95bec173 100644
--- a/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamNumberTest.java
+++ b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamNumberTest.java
@@ -3,23 +3,29 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
import java.math.BigDecimal;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
public class ResourceIndexedSearchParamNumberTest {
private static final String GRITTSCORE = "grittscore";
- public static final ResourceIndexedSearchParamNumber PARAM_VALUE_10_FIRST = new ResourceIndexedSearchParamNumber(new PartitionSettings(), "Patient", GRITTSCORE, BigDecimal.valueOf(10));
- public static final ResourceIndexedSearchParamNumber PARAM_VALUE_10_SECOND = new ResourceIndexedSearchParamNumber(new PartitionSettings(), "Patient", GRITTSCORE, BigDecimal.valueOf(10));
- public static final ResourceIndexedSearchParamNumber PARAM_VALUE_12_FIRST = new ResourceIndexedSearchParamNumber(new PartitionSettings(), "Patient", GRITTSCORE, BigDecimal.valueOf(12));
+ public static ResourceIndexedSearchParamNumber PARAM_VALUE_10_FIRST;
+ public static ResourceIndexedSearchParamNumber PARAM_VALUE_10_SECOND;
+ public static ResourceIndexedSearchParamNumber PARAM_VALUE_12_FIRST;
@BeforeEach
void setUp() {
final ResourceTable resourceTable = new ResourceTable();
resourceTable.setId(1L);
+ PARAM_VALUE_10_FIRST = new ResourceIndexedSearchParamNumber(new PartitionSettings(), "Patient", GRITTSCORE, BigDecimal.valueOf(10));
+ PARAM_VALUE_10_SECOND = new ResourceIndexedSearchParamNumber(new PartitionSettings(), "Patient", GRITTSCORE, BigDecimal.valueOf(10));
+ PARAM_VALUE_12_FIRST = new ResourceIndexedSearchParamNumber(new PartitionSettings(), "Patient", GRITTSCORE, BigDecimal.valueOf(12));
PARAM_VALUE_10_FIRST.setResource(resourceTable);
PARAM_VALUE_10_SECOND.setResource(resourceTable);
PARAM_VALUE_12_FIRST.setResource(resourceTable);
@@ -32,6 +38,34 @@ public class ResourceIndexedSearchParamNumberTest {
assertThat(PARAM_VALUE_12_FIRST.hashCode()).isNotEqualTo(PARAM_VALUE_10_FIRST.hashCode());
}
+ @ParameterizedTest
+ @CsvSource({
+ "Patient, param, 10, false, Observation, param, 10, false, ResourceType is different",
+ "Patient, param, 10, false, Patient, name, 10, false, ParamName is different",
+ "Patient, param, 10, false, Patient, param, 9, false, Value is different",
+ "Patient, param, 10, false, Patient, param, 10, true, Missing is different",
+ })
+ public void testEqualsAndHashCode_withDifferentParams_equalsIsFalseAndHashCodeIsDifferent(String theFirstResourceType,
+ String theFirstParamName,
+ int theFirstValue,
+ boolean theFirstMissing,
+ String theSecondResourceType,
+ String theSecondParamName,
+ int theSecondValue,
+ boolean theSecondMissing,
+ String theMessage) {
+ ResourceIndexedSearchParamNumber param = new ResourceIndexedSearchParamNumber(
+ new PartitionSettings(), theFirstResourceType, theFirstParamName, BigDecimal.valueOf(theFirstValue));
+ param.setMissing(theFirstMissing);
+ ResourceIndexedSearchParamNumber param2 = new ResourceIndexedSearchParamNumber(
+ new PartitionSettings(), theSecondResourceType, theSecondParamName, BigDecimal.valueOf(theSecondValue));
+ param2.setMissing(theSecondMissing);
+
+ assertNotEquals(param, param2, theMessage);
+ assertNotEquals(param2, param, theMessage);
+ assertNotEquals(param.hashCode(), param2.hashCode(), theMessage);
+ }
+
@Test
void equalByReference() {
assertEquals(PARAM_VALUE_10_FIRST, PARAM_VALUE_10_FIRST);
@@ -44,4 +78,13 @@ public class ResourceIndexedSearchParamNumberTest {
assertEquals(PARAM_VALUE_10_SECOND, PARAM_VALUE_10_FIRST);
assertEquals(PARAM_VALUE_10_FIRST.hashCode(), PARAM_VALUE_10_SECOND.hashCode());
}
+
+ @Test
+ void equalsIsTrueForOptimizedSearchParam() {
+ PARAM_VALUE_10_SECOND.optimizeIndexStorage();
+
+ assertEquals(PARAM_VALUE_10_FIRST, PARAM_VALUE_10_SECOND);
+ assertEquals(PARAM_VALUE_10_SECOND, PARAM_VALUE_10_FIRST);
+ assertEquals(PARAM_VALUE_10_FIRST.hashCode(), PARAM_VALUE_10_SECOND.hashCode());
+ }
}
diff --git a/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantityNormalizedTest.java b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantityNormalizedTest.java
index 39d2f459738..b42b96b660a 100644
--- a/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantityNormalizedTest.java
+++ b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantityNormalizedTest.java
@@ -2,10 +2,11 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
-import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
public class ResourceIndexedSearchParamQuantityNormalizedTest {
@@ -20,10 +21,59 @@ public class ResourceIndexedSearchParamQuantityNormalizedTest {
.setValue(Double.parseDouble("123"));
val2.setPartitionSettings(new PartitionSettings());
val2.calculateHashes();
- assertNotNull(val1);
- assertEquals(val1, val2);
- assertThat("").isNotEqualTo(val1);
+ validateEquals(val1, val2);
}
+ @Test
+ public void equalsIsTrueForOptimizedSearchParam() {
+ BaseResourceIndexedSearchParamQuantity param = new ResourceIndexedSearchParamQuantityNormalized(
+ new PartitionSettings(), "Patient", "param", 123.0, "http://unitsofmeasure.org", "kg");
+ BaseResourceIndexedSearchParamQuantity param2 = new ResourceIndexedSearchParamQuantityNormalized(
+ new PartitionSettings(), "Patient", "param", 123.0, "http://unitsofmeasure.org", "kg");
+ param2.optimizeIndexStorage();
+
+ validateEquals(param, param2);
+ }
+
+ private void validateEquals(BaseResourceIndexedSearchParamQuantity theParam1,
+ BaseResourceIndexedSearchParamQuantity theParam2) {
+ assertEquals(theParam2, theParam1);
+ assertEquals(theParam1, theParam2);
+ assertEquals(theParam1.hashCode(), theParam2.hashCode());
+ }
+
+ @ParameterizedTest
+ @CsvSource({
+ "Patient, param, 123.0, units, kg, false, Observation, param, 123.0, units, kg, false, ResourceType is different",
+ "Patient, param, 123.0, units, kg, false, Patient, name, 123.0, units, kg, false, ParamName is different",
+ "Patient, param, 123.0, units, kg, false, Patient, param, 321.0, units, kg, false, Value is different",
+ "Patient, param, 123.0, units, kg, false, Patient, param, 123.0, unitsDiff, kg, false, System is different",
+ "Patient, param, 123.0, units, kg, false, Patient, param, 123.0, units, lb, false, Units is different",
+ "Patient, param, 123.0, units, kg, false, Patient, param, 123.0, units, kg, true, Missing is different",
+ })
+ public void testEqualsAndHashCode_withDifferentParams_equalsIsFalseAndHashCodeIsDifferent(String theFirstResourceType,
+ String theFirstParamName,
+ double theFirstValue,
+ String theFirstSystem,
+ String theFirstUnits,
+ boolean theFirstMissing,
+ String theSecondResourceType,
+ String theSecondParamName,
+ double theSecondValue,
+ String theSecondSystem,
+ String theSecondUnits,
+ boolean theSecondMissing,
+ String theMessage) {
+ BaseResourceIndexedSearchParamQuantity param = new ResourceIndexedSearchParamQuantityNormalized(
+ new PartitionSettings(), theFirstResourceType, theFirstParamName, theFirstValue, theFirstSystem, theFirstUnits);
+ param.setMissing(theFirstMissing);
+ BaseResourceIndexedSearchParamQuantity param2 = new ResourceIndexedSearchParamQuantityNormalized(
+ new PartitionSettings(), theSecondResourceType, theSecondParamName, theSecondValue, theSecondSystem, theSecondUnits);
+ param2.setMissing(theSecondMissing);
+
+ assertNotEquals(param, param2, theMessage);
+ assertNotEquals(param2, param, theMessage);
+ assertNotEquals(param.hashCode(), param2.hashCode(), theMessage);
+ }
}
diff --git a/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantityTest.java b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantityTest.java
index d03895b237d..5d1577ef501 100644
--- a/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantityTest.java
+++ b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantityTest.java
@@ -2,12 +2,13 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
import java.math.BigDecimal;
-import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
public class ResourceIndexedSearchParamQuantityTest {
@@ -38,10 +39,58 @@ public class ResourceIndexedSearchParamQuantityTest {
.setValue(new BigDecimal(123));
val2.setPartitionSettings(new PartitionSettings());
val2.calculateHashes();
- assertNotNull(val1);
- assertEquals(val1, val2);
- assertThat("").isNotEqualTo(val1);
+ validateEquals(val1, val2);
}
+ @Test
+ public void equalsIsTrueForOptimizedSearchParam() {
+ BaseResourceIndexedSearchParamQuantity param = createParam("NAME", "123.001", "value", "VALUE");
+ BaseResourceIndexedSearchParamQuantity param2 = createParam("NAME", "123.001", "value", "VALUE");
+
+ param2.optimizeIndexStorage();
+
+ validateEquals(param, param2);
+ }
+
+ private void validateEquals(BaseResourceIndexedSearchParamQuantity theParam1,
+ BaseResourceIndexedSearchParamQuantity theParam2) {
+ assertEquals(theParam2, theParam1);
+ assertEquals(theParam1, theParam2);
+ assertEquals(theParam1.hashCode(), theParam2.hashCode());
+ }
+
+ @ParameterizedTest
+ @CsvSource({
+ "Patient, param, 123.0, units, kg, false, Observation, param, 123.0, units, kg, false, ResourceType is different",
+ "Patient, param, 123.0, units, kg, false, Patient, name, 123.0, units, kg, false, ParamName is different",
+ "Patient, param, 123.0, units, kg, false, Patient, param, 321.0, units, kg, false, Value is different",
+ "Patient, param, 123.0, units, kg, false, Patient, param, 123.0, unitsDiff, kg, false, System is different",
+ "Patient, param, 123.0, units, kg, false, Patient, param, 123.0, units, lb, false, Units is different",
+ "Patient, param, 123.0, units, kg, false, Patient, param, 123.0, units, kg, true, Missing is different",
+ })
+ public void testEqualsAndHashCode_withDifferentParams_equalsIsFalseAndHashCodeIsDifferent(String theFirstResourceType,
+ String theFirstParamName,
+ double theFirstValue,
+ String theFirstSystem,
+ String theFirstUnits,
+ boolean theFirstMissing,
+ String theSecondResourceType,
+ String theSecondParamName,
+ double theSecondValue,
+ String theSecondSystem,
+ String theSecondUnits,
+ boolean theSecondMissing,
+ String theMessage) {
+ BaseResourceIndexedSearchParamQuantity param = new ResourceIndexedSearchParamQuantity(
+ new PartitionSettings(), theFirstResourceType, theFirstParamName, new BigDecimal(theFirstValue), theFirstSystem, theFirstUnits);
+ param.setMissing(theFirstMissing);
+ BaseResourceIndexedSearchParamQuantity param2 = new ResourceIndexedSearchParamQuantity(
+ new PartitionSettings(), theSecondResourceType, theSecondParamName, new BigDecimal(theSecondValue), theSecondSystem, theSecondUnits);
+ param2.setMissing(theSecondMissing);
+
+ assertNotEquals(param, param2, theMessage);
+ assertNotEquals(param2, param, theMessage);
+ assertNotEquals(param.hashCode(), param2.hashCode(), theMessage);
+ }
}
diff --git a/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamStringTest.java b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamStringTest.java
index f271c6a6743..d5fcf5f7a5d 100644
--- a/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamStringTest.java
+++ b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamStringTest.java
@@ -2,12 +2,12 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
-import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
@SuppressWarnings("SpellCheckingInspection")
public class ResourceIndexedSearchParamStringTest {
@@ -85,10 +85,7 @@ public class ResourceIndexedSearchParamStringTest {
val2.setPartitionSettings(new PartitionSettings());
val2.setStorageSettings(new StorageSettings());
val2.calculateHashes();
- assertNotNull(val1);
- assertEquals(val1, val2);
-
- assertThat("").isNotEqualTo(val1);
+ validateEquals(val1, val2);
}
@Test
@@ -105,9 +102,55 @@ public class ResourceIndexedSearchParamStringTest {
val2.setPartitionSettings(new PartitionSettings().setIncludePartitionInSearchHashes(true));
val2.setStorageSettings(new StorageSettings());
val2.calculateHashes();
- assertNotNull(val1);
- assertEquals(val1, val2);
- assertThat("").isNotEqualTo(val1);
+ validateEquals(val1, val2);
+ }
+
+ @Test
+ public void equalsIsTrueForOptimizedSearchParam() {
+ ResourceIndexedSearchParamString param = new ResourceIndexedSearchParamString(new PartitionSettings(), new StorageSettings(), "Patient", "param", "aaa", "AAA");
+ ResourceIndexedSearchParamString param2 = new ResourceIndexedSearchParamString(new PartitionSettings(), new StorageSettings(), "Patient", "param", "aaa", "AAA");
+
+ param2.optimizeIndexStorage();
+
+ validateEquals(param, param2);
+ }
+
+ private void validateEquals(ResourceIndexedSearchParamString theParam1,
+ ResourceIndexedSearchParamString theParam2) {
+ assertEquals(theParam2, theParam1);
+ assertEquals(theParam1, theParam2);
+ assertEquals(theParam1.hashCode(), theParam2.hashCode());
+ }
+
+ @ParameterizedTest
+ @CsvSource({
+ "Patient, param, aaa, AAA, false, Observation, param, aaa, AAA, false, ResourceType is different",
+ "Patient, param, aaa, AAA, false, Patient, name, aaa, AAA, false, ParamName is different",
+ "Patient, param, aaa, AAA, false, Patient, param, bbb, AAA, false, Value is different",
+ "Patient, param, aaa, AAA, false, Patient, param, aaa, BBB, false, ValueNormalized is different",
+ "Patient, param, aaa, AAA, false, Patient, param, aaa, AAA, true, Missing is different",
+ })
+ public void testEqualsAndHashCode_withDifferentParams_equalsIsFalseAndHashCodeIsDifferent(String theFirstResourceType,
+ String theFirstParamName,
+ String theFirstValue,
+ String theFirstValueNormalized,
+ boolean theFirstMissing,
+ String theSecondResourceType,
+ String theSecondParamName,
+ String theSecondValue,
+ String theSecondValueNormalized,
+ boolean theSecondMissing,
+ String theMessage) {
+ ResourceIndexedSearchParamString param = new ResourceIndexedSearchParamString(new PartitionSettings(),
+ new StorageSettings(), theFirstResourceType, theFirstParamName, theFirstValue, theFirstValueNormalized);
+ param.setMissing(theFirstMissing);
+ ResourceIndexedSearchParamString param2 = new ResourceIndexedSearchParamString(new PartitionSettings(),
+ new StorageSettings(), theSecondResourceType, theSecondParamName, theSecondValue, theSecondValueNormalized);
+ param2.setMissing(theSecondMissing);
+
+ assertNotEquals(param, param2, theMessage);
+ assertNotEquals(param2, param, theMessage);
+ assertNotEquals(param.hashCode(), param2.hashCode(), theMessage);
}
}
diff --git a/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamTokenTest.java b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamTokenTest.java
index 92afa9b10c3..c1d867c6c5a 100644
--- a/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamTokenTest.java
+++ b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamTokenTest.java
@@ -2,10 +2,11 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
-import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
public class ResourceIndexedSearchParamTokenTest {
@@ -43,9 +44,54 @@ public class ResourceIndexedSearchParamTokenTest {
.setValue("AAA");
val2.setPartitionSettings(new PartitionSettings());
val2.calculateHashes();
- assertNotNull(val1);
- assertEquals(val1, val2);
- assertThat("").isNotEqualTo(val1);
+ validateEquals(val1, val2);
}
+ @Test
+ public void equalsIsTrueForOptimizedSearchParam() {
+ ResourceIndexedSearchParamToken param = new ResourceIndexedSearchParamToken(new PartitionSettings(), "Patient", "NAME", "SYSTEM", "VALUE");
+ ResourceIndexedSearchParamToken param2 = new ResourceIndexedSearchParamToken(new PartitionSettings(), "Patient", "NAME", "SYSTEM", "VALUE");
+
+ param2.optimizeIndexStorage();
+
+ validateEquals(param, param2);
+ }
+
+ private void validateEquals(ResourceIndexedSearchParamToken theParam1,
+ ResourceIndexedSearchParamToken theParam2) {
+ assertEquals(theParam2, theParam1);
+ assertEquals(theParam1, theParam2);
+ assertEquals(theParam1.hashCode(), theParam2.hashCode());
+ }
+
+ @ParameterizedTest
+ @CsvSource({
+ "Patient, param, system, value, false, Observation, param, system, value, false, ResourceType is different",
+ "Patient, param, system, value, false, Patient, name, system, value, false, ParamName is different",
+ "Patient, param, system, value, false, Patient, param, sys, value, false, System is different",
+ "Patient, param, system, value, false, Patient, param, system, val, false, Value is different",
+ "Patient, param, system, value, false, Patient, param, system, value, true, Missing is different",
+ })
+ public void testEqualsAndHashCode_withDifferentParams_equalsIsFalseAndHashCodeIsDifferent(String theFirstResourceType,
+ String theFirstParamName,
+ String theFirstSystem,
+ String theFirstValue,
+ boolean theFirstMissing,
+ String theSecondResourceType,
+ String theSecondParamName,
+ String theSecondSystem,
+ String theSecondValue,
+ boolean theSecondMissing,
+ String theMessage) {
+ ResourceIndexedSearchParamToken param = new ResourceIndexedSearchParamToken(
+ new PartitionSettings(), theFirstResourceType, theFirstParamName, theFirstSystem, theFirstValue);
+ param.setMissing(theFirstMissing);
+ ResourceIndexedSearchParamToken param2 = new ResourceIndexedSearchParamToken(
+ new PartitionSettings(), theSecondResourceType, theSecondParamName, theSecondSystem, theSecondValue);
+ param2.setMissing(theSecondMissing);
+
+ assertNotEquals(param, param2, theMessage);
+ assertNotEquals(param2, param, theMessage);
+ assertNotEquals(param.hashCode(), param2.hashCode(), theMessage);
+ }
}
diff --git a/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUriTest.java b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUriTest.java
index eb04e913758..2d551dcb7fb 100644
--- a/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUriTest.java
+++ b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUriTest.java
@@ -2,10 +2,11 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
-import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
public class ResourceIndexedSearchParamUriTest {
@@ -29,10 +30,52 @@ public class ResourceIndexedSearchParamUriTest {
.setUri("http://foo");
val2.setPartitionSettings(new PartitionSettings());
val2.calculateHashes();
- assertNotNull(val1);
- assertEquals(val1, val2);
- assertThat("").isNotEqualTo(val1);
+ validateEquals(val1, val2);
}
+ @Test
+ public void equalsIsTrueForOptimizedSearchParam() {
+ ResourceIndexedSearchParamUri param = new ResourceIndexedSearchParamUri(new PartitionSettings(), "Patient", "NAME", "http://foo");
+ ResourceIndexedSearchParamUri param2 = new ResourceIndexedSearchParamUri(new PartitionSettings(), "Patient", "NAME", "http://foo");
+
+ param2.optimizeIndexStorage();
+
+ validateEquals(param, param2);
+ }
+
+ private void validateEquals(ResourceIndexedSearchParamUri theParam1,
+ ResourceIndexedSearchParamUri theParam2) {
+ assertEquals(theParam2, theParam1);
+ assertEquals(theParam1, theParam2);
+ assertEquals(theParam1.hashCode(), theParam2.hashCode());
+ }
+
+ @ParameterizedTest
+ @CsvSource({
+ "Patient, param, http://test, false, Observation, param, http://test, false, ResourceType is different",
+ "Patient, param, http://test, false, Patient, name, http://test, false, ParamName is different",
+ "Patient, param, http://test, false, Patient, param, http://diff, false, Uri is different",
+ "Patient, param, http://test, false, Patient, param, http://test, true, Missing is different",
+ })
+ public void testEqualsAndHashCode_withDifferentParams_equalsIsFalseAndHashCodeIsDifferent(String theFirstResourceType,
+ String theFirstParamName,
+ String theFirstUri,
+ boolean theFirstMissing,
+ String theSecondResourceType,
+ String theSecondParamName,
+ String theSecondUri,
+ boolean theSecondMissing,
+ String theMessage) {
+ ResourceIndexedSearchParamUri param = new ResourceIndexedSearchParamUri(new PartitionSettings(),
+ theFirstResourceType, theFirstParamName, theFirstUri);
+ param.setMissing(theFirstMissing);
+ ResourceIndexedSearchParamUri param2 = new ResourceIndexedSearchParamUri(new PartitionSettings(),
+ theSecondResourceType, theSecondParamName, theSecondUri);
+ param2.setMissing(theSecondMissing);
+
+ assertNotEquals(param, param2, theMessage);
+ assertNotEquals(param2, param, theMessage);
+ assertNotEquals(param.hashCode(), param2.hashCode(), theMessage);
+ }
}
diff --git a/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/util/SearchParamHashUtilTest.java b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/util/SearchParamHashUtilTest.java
new file mode 100644
index 00000000000..fb709cdbed9
--- /dev/null
+++ b/hapi-fhir-jpaserver-model/src/test/java/ca/uhn/fhir/jpa/model/util/SearchParamHashUtilTest.java
@@ -0,0 +1,68 @@
+package ca.uhn.fhir.jpa.model.util;
+
+import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
+
+public class SearchParamHashUtilTest {
+
+ private final PartitionSettings myPartitionSettings = new PartitionSettings();
+
+ @BeforeEach
+ void setUp() {
+ myPartitionSettings.setPartitioningEnabled(false);
+ }
+
+ @Test
+ public void hashSearchParam_withPartitionDisabled_generatesCorrectHashIdentity() {
+ Long hashIdentity = SearchParamHash.hashSearchParam(myPartitionSettings, null, "Patient", "name");
+ // Make sure hashing function gives consistent results
+ assertEquals(-1575415002568401616L, hashIdentity);
+ }
+
+ @Test
+ public void hashSearchParam_withPartitionDisabledAndNullValue_generatesCorrectHashIdentity() {
+ Long hashIdentity = SearchParamHash.hashSearchParam(myPartitionSettings, null, "Patient", "name", null);
+ // Make sure hashing function gives consistent results
+ assertEquals(-440750991942222070L, hashIdentity);
+ }
+
+ @Test
+ public void hashSearchParam_withIncludePartitionInSearchHashesAndNullRequestPartitionId_doesNotThrowException() {
+ myPartitionSettings.setPartitioningEnabled(true);
+ myPartitionSettings.setIncludePartitionInSearchHashes(true);
+
+ Long hashIdentity = SearchParamHash.hashSearchParam(myPartitionSettings, null, "Patient", "name");
+ assertEquals(-1575415002568401616L, hashIdentity);
+ }
+
+ @Test
+ public void hashSearchParam_withIncludePartitionInSearchHashesAndRequestPartitionId_includesPartitionIdInHash() {
+ myPartitionSettings.setPartitioningEnabled(true);
+ myPartitionSettings.setIncludePartitionInSearchHashes(true);
+ RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionId(1);
+
+ Long hashIdentity = SearchParamHash.hashSearchParam(myPartitionSettings, requestPartitionId, "Patient", "name");
+ assertEquals(-6667609654163557704L, hashIdentity);
+ }
+
+ @Test
+ public void hashSearchParam_withIncludePartitionInSearchHashesAndMultipleRequestPartitionIds_throwsException() {
+ myPartitionSettings.setPartitioningEnabled(true);
+ myPartitionSettings.setIncludePartitionInSearchHashes(true);
+ RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionIds(1, 2);
+
+ try {
+ SearchParamHash.hashSearchParam(myPartitionSettings, requestPartitionId, "Patient", "name");
+ fail();
+ } catch (InternalErrorException e) {
+ assertEquals(Msg.code(1527) + "Can not search multiple partitions when partitions are included in search hashes", e.getMessage());
+ }
+ }
+}
diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ResourceIndexedSearchParams.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ResourceIndexedSearchParams.java
index f1cabaf294d..e9e9caa114d 100644
--- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ResourceIndexedSearchParams.java
+++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ResourceIndexedSearchParams.java
@@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.searchparam.extractor;
import ca.uhn.fhir.context.RuntimeSearchParam;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
@@ -37,6 +38,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.SearchParamPresentEntity;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
+import ca.uhn.fhir.jpa.model.util.SearchParamHash;
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;
import ca.uhn.fhir.jpa.searchparam.util.RuntimeSearchParamHelper;
import ca.uhn.fhir.model.api.IQueryParameterType;
@@ -294,7 +296,7 @@ public final class ResourceIndexedSearchParams {
}
for (BaseResourceIndexedSearchParam nextParam : resourceParams) {
- if (nextParam.getParamName().equalsIgnoreCase(theParamName)) {
+ if (isMatchSearchParam(theStorageSettings, theResourceName, theParamName, nextParam)) {
if (nextParam.matches(value)) {
return true;
}
@@ -304,6 +306,21 @@ public final class ResourceIndexedSearchParams {
return false;
}
+ public static boolean isMatchSearchParam(
+ StorageSettings theStorageSettings,
+ String theResourceName,
+ String theParamName,
+ BaseResourceIndexedSearchParam theIndexedSearchParam) {
+
+ if (theStorageSettings.isIndexStorageOptimized()) {
+ Long hashIdentity = SearchParamHash.hashSearchParam(
+ new PartitionSettings(), RequestPartitionId.defaultPartition(), theResourceName, theParamName);
+ return theIndexedSearchParam.getHashIdentity().equals(hashIdentity);
+ } else {
+ return theIndexedSearchParam.getParamName().equalsIgnoreCase(theParamName);
+ }
+ }
+
/**
* @deprecated Replace with the method below
*/
diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcher.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcher.java
index 571b43f03b6..78fc2cae5ef 100644
--- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcher.java
+++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcher.java
@@ -71,6 +71,7 @@ import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
+import static ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams.isMatchSearchParam;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
@@ -579,11 +580,11 @@ public class InMemoryResourceMatcher {
switch (theQueryParam.getModifier()) {
case IN:
return theSearchParams.myTokenParams.stream()
- .filter(t -> t.getParamName().equals(theParamName))
+ .filter(t -> isMatchSearchParam(theStorageSettings, theResourceName, theParamName, t))
.anyMatch(t -> systemContainsCode(theQueryParam, t));
case NOT_IN:
return theSearchParams.myTokenParams.stream()
- .filter(t -> t.getParamName().equals(theParamName))
+ .filter(t -> isMatchSearchParam(theStorageSettings, theResourceName, theParamName, t))
.noneMatch(t -> systemContainsCode(theQueryParam, t));
case NOT:
return !theSearchParams.matchParam(
diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/JpaSearchParamCache.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/JpaSearchParamCache.java
index 6918ba990f7..d74faf0e656 100644
--- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/JpaSearchParamCache.java
+++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/JpaSearchParamCache.java
@@ -25,9 +25,15 @@ import ca.uhn.fhir.context.phonetic.IPhoneticEncoder;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorService;
import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
+import ca.uhn.fhir.jpa.model.util.SearchParamHash;
+import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
+import ca.uhn.fhir.rest.server.util.IndexedSearchParam;
import ca.uhn.fhir.rest.server.util.ResourceSearchParams;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
@@ -46,14 +52,26 @@ import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors;
+import static ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum.DATE;
+import static ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum.NUMBER;
+import static ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum.QUANTITY;
+import static ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum.REFERENCE;
+import static ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum.SPECIAL;
+import static ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum.STRING;
+import static ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum.TOKEN;
+import static ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum.URI;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class JpaSearchParamCache {
private static final Logger ourLog = LoggerFactory.getLogger(JpaSearchParamCache.class);
+ private static final List SUPPORTED_INDEXED_SEARCH_PARAMS =
+ List.of(SPECIAL, DATE, NUMBER, QUANTITY, STRING, TOKEN, URI, REFERENCE);
+
volatile Map> myActiveComboSearchParams = Collections.emptyMap();
volatile Map, List>> myActiveParamNamesToComboSearchParams =
Collections.emptyMap();
+ volatile Map myHashIdentityToIndexedSearchParams = Collections.emptyMap();
public List getActiveComboSearchParams(String theResourceName) {
List retval = myActiveComboSearchParams.get(theResourceName);
@@ -90,6 +108,10 @@ public class JpaSearchParamCache {
return Collections.unmodifiableList(retVal);
}
+ public Optional getIndexedSearchParamByHashIdentity(Long theHashIdentity) {
+ return Optional.ofNullable(myHashIdentityToIndexedSearchParams.get(theHashIdentity));
+ }
+
void populateActiveSearchParams(
IInterceptorService theInterceptorBroadcaster,
IPhoneticEncoder theDefaultPhoneticEncoder,
@@ -99,6 +121,7 @@ public class JpaSearchParamCache {
Map idToRuntimeSearchParam = new HashMap<>();
List jpaSearchParams = new ArrayList<>();
+ Map hashIdentityToIndexedSearchParams = new HashMap<>();
/*
* Loop through parameters and find JPA params
@@ -133,6 +156,7 @@ public class JpaSearchParamCache {
}
setPhoneticEncoder(theDefaultPhoneticEncoder, nextCandidate);
+ populateIndexedSearchParams(theResourceName, nextCandidate, hashIdentityToIndexedSearchParams);
}
}
@@ -183,6 +207,7 @@ public class JpaSearchParamCache {
myActiveComboSearchParams = resourceNameToComboSearchParams;
myActiveParamNamesToComboSearchParams = activeParamNamesToComboSearchParams;
+ myHashIdentityToIndexedSearchParams = hashIdentityToIndexedSearchParams;
}
void setPhoneticEncoder(IPhoneticEncoder theDefaultPhoneticEncoder, RuntimeSearchParam searchParam) {
@@ -195,4 +220,36 @@ public class JpaSearchParamCache {
searchParam.setPhoneticEncoder(theDefaultPhoneticEncoder);
}
}
+
+ private void populateIndexedSearchParams(
+ String theResourceName,
+ RuntimeSearchParam theRuntimeSearchParam,
+ Map theHashIdentityToIndexedSearchParams) {
+
+ if (SUPPORTED_INDEXED_SEARCH_PARAMS.contains(theRuntimeSearchParam.getParamType())) {
+ addIndexedSearchParam(
+ theResourceName, theHashIdentityToIndexedSearchParams, theRuntimeSearchParam.getName());
+ // handle token search parameters with :of-type modifier
+ if (theRuntimeSearchParam.getParamType() == TOKEN) {
+ addIndexedSearchParam(
+ theResourceName,
+ theHashIdentityToIndexedSearchParams,
+ theRuntimeSearchParam.getName() + Constants.PARAMQUALIFIER_TOKEN_OF_TYPE);
+ }
+ // handle Uplifted Ref Chain Search Parameters
+ theRuntimeSearchParam.getUpliftRefchainCodes().stream()
+ .map(urCode -> String.format("%s.%s", theRuntimeSearchParam.getName(), urCode))
+ .forEach(urSpName ->
+ addIndexedSearchParam(theResourceName, theHashIdentityToIndexedSearchParams, urSpName));
+ }
+ }
+
+ private void addIndexedSearchParam(
+ String theResourceName,
+ Map theHashIdentityToIndexedSearchParams,
+ String theSpName) {
+ Long hashIdentity = SearchParamHash.hashSearchParam(
+ new PartitionSettings(), RequestPartitionId.defaultPartition(), theResourceName, theSpName);
+ theHashIdentityToIndexedSearchParams.put(hashIdentity, new IndexedSearchParam(theSpName, theResourceName));
+ }
}
diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParamRegistryImpl.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParamRegistryImpl.java
index 506499076f8..4d67c566bc1 100644
--- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParamRegistryImpl.java
+++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParamRegistryImpl.java
@@ -31,12 +31,14 @@ import ca.uhn.fhir.jpa.cache.IResourceChangeListenerCache;
import ca.uhn.fhir.jpa.cache.IResourceChangeListenerRegistry;
import ca.uhn.fhir.jpa.cache.ResourceChangeResult;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
+import ca.uhn.fhir.jpa.model.search.ISearchParamHashIdentityRegistry;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
+import ca.uhn.fhir.rest.server.util.IndexedSearchParam;
import ca.uhn.fhir.rest.server.util.ResourceSearchParams;
import ca.uhn.fhir.util.SearchParameterUtil;
import ca.uhn.fhir.util.StopWatch;
@@ -65,7 +67,10 @@ import java.util.Set;
import static org.apache.commons.lang3.StringUtils.isBlank;
public class SearchParamRegistryImpl
- implements ISearchParamRegistry, IResourceChangeListener, ISearchParamRegistryController {
+ implements ISearchParamRegistry,
+ IResourceChangeListener,
+ ISearchParamRegistryController,
+ ISearchParamHashIdentityRegistry {
public static final Set<String> NON_DISABLEABLE_SEARCH_PARAMS =
Collections.unmodifiableSet(Sets.newHashSet("*:url", "Subscription:*", "SearchParameter:*"));
@@ -147,6 +152,11 @@ public class SearchParamRegistryImpl
return myJpaSearchParamCache.getActiveComboSearchParams(theResourceName, theParamNames);
}
+ @Override
+ public Optional<IndexedSearchParam> getIndexedSearchParamByHashIdentity(Long theHashIdentity) {
+ return myJpaSearchParamCache.getIndexedSearchParamByHashIdentity(theHashIdentity);
+ }
+
@Nullable
@Override
public RuntimeSearchParam getActiveSearchParamByUrl(String theUrl) {
diff --git a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/ResourceIndexedSearchParamsTest.java b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/ResourceIndexedSearchParamsTest.java
index 30ba81151b4..a1ad93d1f2d 100644
--- a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/ResourceIndexedSearchParamsTest.java
+++ b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/ResourceIndexedSearchParamsTest.java
@@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.searchparam.extractor;
+import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
@@ -7,6 +8,8 @@ import ca.uhn.fhir.rest.param.ReferenceParam;
import com.google.common.collect.Lists;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
import java.util.Date;
import java.util.List;
@@ -103,4 +106,35 @@ public class ResourceIndexedSearchParamsTest {
assertThat(values).as(values.toString()).isEmpty();
}
+ @ParameterizedTest
+ @CsvSource({
+ "name, name, , false, true",
+ "name, NAME, , false, true",
+ "name, name, 7000, false, true",
+ "name, param, , false, false",
+ "name, param, 7000, false, false",
+ " , name, -1575415002568401616, true, true",
+ "param, name, -1575415002568401616, true, true",
+ " , param, -1575415002568401616, true, false",
+ "name, param, -1575415002568401616, true, false",
+ })
+ public void testIsMatchSearchParams_matchesByParamNameOrHashIdentity(String theParamName,
+ String theExpectedParamName,
+ Long theHashIdentity,
+ boolean theIndexStorageOptimized,
+ boolean theShouldMatch) {
+ // setup
+ StorageSettings storageSettings = new StorageSettings();
+ storageSettings.setIndexStorageOptimized(theIndexStorageOptimized);
+ ResourceIndexedSearchParamString param = new ResourceIndexedSearchParamString();
+ param.setResourceType("Patient");
+ param.setParamName(theParamName);
+ param.setHashIdentity(theHashIdentity);
+
+ // execute
+ boolean isMatch = ResourceIndexedSearchParams.isMatchSearchParam(storageSettings, "Patient", theExpectedParamName, param);
+
+ // validate
+ assertThat(isMatch).isEqualTo(theShouldMatch);
+ }
}
diff --git a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcherR5IndexStorageOptimizedTest.java b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcherR5IndexStorageOptimizedTest.java
new file mode 100644
index 00000000000..77d46a98f07
--- /dev/null
+++ b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcherR5IndexStorageOptimizedTest.java
@@ -0,0 +1,45 @@
+package ca.uhn.fhir.jpa.searchparam.matcher;
+
+import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
+import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
+import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
+import org.hl7.fhir.r5.model.Observation;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+
+
+public class InMemoryResourceMatcherR5IndexStorageOptimizedTest extends InMemoryResourceMatcherR5Test {
+
+ @Override
+ @BeforeEach
+ public void before() {
+ super.before();
+ myStorageSettings.setIndexStorageOptimized(true);
+ }
+
+ @AfterEach
+ public void after() {
+ myStorageSettings.setIndexStorageOptimized(false);
+ }
+
+ @Override
+ protected ResourceIndexedSearchParamDate extractEffectiveDateParam(Observation theObservation) {
+ ResourceIndexedSearchParamDate searchParamDate = super.extractEffectiveDateParam(theObservation);
+ searchParamDate.optimizeIndexStorage();
+ return searchParamDate;
+ }
+
+ @Override
+ protected ResourceIndexedSearchParamToken extractCodeTokenParam(Observation theObservation) {
+ ResourceIndexedSearchParamToken searchParamToken = super.extractCodeTokenParam(theObservation);
+ searchParamToken.optimizeIndexStorage();
+ return searchParamToken;
+ }
+
+ @Override
+ protected ResourceIndexedSearchParamUri extractSourceUriParam(Observation theObservation) {
+ ResourceIndexedSearchParamUri searchParamUri = super.extractSourceUriParam(theObservation);
+ searchParamUri.optimizeIndexStorage();
+ return searchParamUri;
+ }
+}
diff --git a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcherR5Test.java b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcherR5Test.java
index 59eb798e194..ccee696f68f 100644
--- a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcherR5Test.java
+++ b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcherR5Test.java
@@ -34,6 +34,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
+import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import java.time.Duration;
@@ -51,6 +52,7 @@ import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@ExtendWith(SpringExtension.class)
+@ContextConfiguration(classes = {InMemoryResourceMatcherR5Test.SpringConfig.class})
public class InMemoryResourceMatcherR5Test {
public static final String OBSERVATION_DATE = "1970-10-17";
public static final String OBSERVATION_DATETIME = OBSERVATION_DATE + "T01:00:00-08:30";
@@ -76,6 +78,8 @@ public class InMemoryResourceMatcherR5Test {
IndexedSearchParamExtractor myIndexedSearchParamExtractor;
@Autowired
private InMemoryResourceMatcher myInMemoryResourceMatcher;
+ @Autowired
+ StorageSettings myStorageSettings;
private Observation myObservation;
private ResourceIndexedSearchParams mySearchParams;
@@ -414,17 +418,17 @@ public class InMemoryResourceMatcherR5Test {
}
@Nonnull
- private ResourceIndexedSearchParamDate extractEffectiveDateParam(Observation theObservation) {
+ protected ResourceIndexedSearchParamDate extractEffectiveDateParam(Observation theObservation) {
BaseDateTimeType dateValue = (BaseDateTimeType) theObservation.getEffective();
- return new ResourceIndexedSearchParamDate(new PartitionSettings(), "Patient", "date", dateValue.getValue(), dateValue.getValueAsString(), dateValue.getValue(), dateValue.getValueAsString(), dateValue.getValueAsString());
+ return new ResourceIndexedSearchParamDate(new PartitionSettings(), "Observation", "date", dateValue.getValue(), dateValue.getValueAsString(), dateValue.getValue(), dateValue.getValueAsString(), dateValue.getValueAsString());
}
- private ResourceIndexedSearchParamToken extractCodeTokenParam(Observation theObservation) {
+ protected ResourceIndexedSearchParamToken extractCodeTokenParam(Observation theObservation) {
Coding coding = theObservation.getCode().getCodingFirstRep();
return new ResourceIndexedSearchParamToken(new PartitionSettings(), "Observation", "code", coding.getSystem(), coding.getCode());
}
- private ResourceIndexedSearchParamUri extractSourceUriParam(Observation theObservation) {
+ protected ResourceIndexedSearchParamUri extractSourceUriParam(Observation theObservation) {
String source = theObservation.getMeta().getSource();
return new ResourceIndexedSearchParamUri(new PartitionSettings(), "Observation", "_source", source);
}
diff --git a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/registry/JpaSearchParamCacheTest.java b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/registry/JpaSearchParamCacheTest.java
index 0d8786b33d0..5d924313a15 100644
--- a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/registry/JpaSearchParamCacheTest.java
+++ b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/registry/JpaSearchParamCacheTest.java
@@ -1,17 +1,25 @@
package ca.uhn.fhir.jpa.searchparam.registry;
import ca.uhn.fhir.context.ComboSearchParamType;
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.util.SearchParamHash;
+import ca.uhn.fhir.rest.server.util.IndexedSearchParam;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.IdType;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
+import static ca.uhn.fhir.util.HapiExtensions.EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -19,10 +27,11 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class JpaSearchParamCacheTest {
-
+ private static final FhirContext ourFhirContext = FhirContext.forR4Cached();
private static final String RESOURCE_TYPE = "Patient";
private TestableJpaSearchParamCache myJpaSearchParamCache;
+
@BeforeEach
public void beforeEach(){
myJpaSearchParamCache = new TestableJpaSearchParamCache();
@@ -93,6 +102,41 @@ public class JpaSearchParamCacheTest {
assertTrue(found.isEmpty());
}
+ @ParameterizedTest
+ @CsvSource({
+ "Patient, name, name, type = string",
+ "Patient, active, active, type = token",
+ "Patient, active, active:of-type, type = token with of-type",
+ "Patient, birthdate, birthdate, type = date",
+ "Patient, general-practitioner, general-practitioner, type = reference",
+ "Location, near, near, type = special",
+ "RiskAssessment, probability, probability, type = number",
+ "Observation, value-quantity, value-quantity, type = quantity",
+ "ValueSet, url, url, type = uri",
+ "Encounter, subject, subject.name, type = reference with refChain"
+ })
+ public void getIndexedSearchParamByHashIdentity_returnsCorrectIndexedSearchParam(String theResourceType,
+ String theSpName,
+ String theExpectedSpName,
+ String theSpType) {
+ // setup
+ RuntimeSearchParamCache runtimeCache = new RuntimeSearchParamCache();
+ RuntimeResourceDefinition resourceDefinition = ourFhirContext.getResourceDefinition(theResourceType);
+ RuntimeSearchParam runtimeSearchParam = resourceDefinition.getSearchParam(theSpName);
+ runtimeSearchParam.addUpliftRefchain("name", EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN);
+ runtimeCache.add(theResourceType, theSpName, resourceDefinition.getSearchParam(theSpName));
+ Long hashIdentity = SearchParamHash.hashSearchParam(new PartitionSettings(), null, theResourceType, theExpectedSpName);
+
+ // execute
+ myJpaSearchParamCache.populateActiveSearchParams(null, null, runtimeCache);
+ Optional<IndexedSearchParam> indexedSearchParam = myJpaSearchParamCache.getIndexedSearchParamByHashIdentity(hashIdentity);
+
+ // validate
+ assertTrue(indexedSearchParam.isPresent(), "No IndexedSearchParam found for search param with " + theSpType);
+ assertEquals(theResourceType, indexedSearchParam.get().getResourceType());
+ assertEquals(theExpectedSpName, indexedSearchParam.get().getParameterName());
+ }
+
private RuntimeSearchParam createSearchParam(ComboSearchParamType theType){
return createSearchParam(null, theType);
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4IndexStorageOptimizedTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4IndexStorageOptimizedTest.java
new file mode 100644
index 00000000000..d94b0089337
--- /dev/null
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4IndexStorageOptimizedTest.java
@@ -0,0 +1,362 @@
+package ca.uhn.fhir.jpa.dao.r4;
+
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx;
+import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters;
+import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
+import ca.uhn.fhir.context.ConfigurationException;
+import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
+import ca.uhn.fhir.jpa.config.SearchConfig;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
+import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
+import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamCoords;
+import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
+import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamNumber;
+import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity;
+import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantityNormalized;
+import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
+import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
+import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
+import ca.uhn.fhir.jpa.model.entity.StorageSettings;
+import ca.uhn.fhir.jpa.model.util.SearchParamHash;
+import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;
+import ca.uhn.fhir.jpa.reindex.ReindexStepTest;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
+import ca.uhn.fhir.rest.param.BaseParam;
+import ca.uhn.fhir.rest.param.DateParam;
+import ca.uhn.fhir.rest.param.NumberParam;
+import ca.uhn.fhir.rest.param.QuantityParam;
+import ca.uhn.fhir.rest.param.SpecialParam;
+import ca.uhn.fhir.rest.param.StringParam;
+import ca.uhn.fhir.rest.param.TokenParam;
+import ca.uhn.fhir.rest.param.UriParam;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.DateType;
+import org.hl7.fhir.r4.model.DecimalType;
+import org.hl7.fhir.r4.model.Location;
+import org.hl7.fhir.r4.model.Observation;
+import org.hl7.fhir.r4.model.Patient;
+import org.hl7.fhir.r4.model.Quantity;
+import org.hl7.fhir.r4.model.RiskAssessment;
+import org.hl7.fhir.r4.model.Substance;
+import org.hl7.fhir.r4.model.ValueSet;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
+import org.junit.jupiter.params.provider.ValueSource;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.jpa.repository.JpaRepository;
+
+import java.util.List;
+
+import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+
+/**
+ * This test was added to check if changing {@link StorageSettings#isIndexStorageOptimized()} setting and performing
+ * $reindex operation will correctly null/recover sp_name, res_type, sp_updated parameters
+ * of ResourceIndexedSearchParam entities.
+ */
+public class FhirResourceDaoR4IndexStorageOptimizedTest extends BaseJpaR4Test {
+
+ @Autowired
+ private IJobCoordinator myJobCoordinator;
+
+ @Autowired
+ private SearchConfig mySearchConfig;
+
+ @AfterEach
+ void cleanUp() {
+ myPartitionSettings.setIncludePartitionInSearchHashes(false);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ public void testCoordinatesIndexedSearchParam_searchAndReindex_searchParamUpdatedCorrectly(boolean theIsIndexStorageOptimized) {
+ // setup
+ myStorageSettings.setIndexStorageOptimized(theIsIndexStorageOptimized);
+ Location loc = new Location();
+ loc.getPosition().setLatitude(43.7);
+ loc.getPosition().setLongitude(79.4);
+ IIdType id = myLocationDao.create(loc, mySrd).getId().toUnqualifiedVersionless();
+
+ validateAndReindex(theIsIndexStorageOptimized, myLocationDao, myResourceIndexedSearchParamCoordsDao, id,
+ Location.SP_NEAR, "Location", new SpecialParam().setValue("43.7|79.4"), ResourceIndexedSearchParamCoords.class);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ public void testDateIndexedSearchParam_searchAndReindex_searchParamUpdatedCorrectly(boolean theIsIndexStorageOptimized) {
+ // setup
+ myStorageSettings.setIndexStorageOptimized(theIsIndexStorageOptimized);
+ Patient p = new Patient();
+ p.setBirthDateElement(new DateType("2021-02-22"));
+ IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
+
+ validateAndReindex(theIsIndexStorageOptimized, myPatientDao, myResourceIndexedSearchParamDateDao, id,
+ Patient.SP_BIRTHDATE, "Patient", new DateParam("2021-02-22"), ResourceIndexedSearchParamDate.class);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ public void testNumberIndexedSearchParam_searchAndReindex_searchParamUpdatedCorrectly(boolean theIsIndexStorageOptimized) {
+ // setup
+ myStorageSettings.setIndexStorageOptimized(theIsIndexStorageOptimized);
+ RiskAssessment riskAssessment = new RiskAssessment();
+ DecimalType doseNumber = new DecimalType(15);
+ riskAssessment.addPrediction(new RiskAssessment.RiskAssessmentPredictionComponent().setProbability(doseNumber));
+ IIdType id = myRiskAssessmentDao.create(riskAssessment, mySrd).getId().toUnqualifiedVersionless();
+
+ validateAndReindex(theIsIndexStorageOptimized, myRiskAssessmentDao, myResourceIndexedSearchParamNumberDao, id,
+ RiskAssessment.SP_PROBABILITY, "RiskAssessment", new NumberParam(15), ResourceIndexedSearchParamNumber.class);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ public void testQuantityIndexedSearchParam_searchAndReindex_searchParamUpdatedCorrectly(boolean theIsIndexStorageOptimized) {
+ // setup
+ myStorageSettings.setIndexStorageOptimized(theIsIndexStorageOptimized);
+ Observation observation = new Observation();
+ observation.setValue(new Quantity(123));
+ IIdType id = myObservationDao.create(observation, mySrd).getId().toUnqualifiedVersionless();
+
+ validateAndReindex(theIsIndexStorageOptimized, myObservationDao, myResourceIndexedSearchParamQuantityDao, id,
+ Observation.SP_VALUE_QUANTITY, "Observation", new QuantityParam(123), ResourceIndexedSearchParamQuantity.class);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ public void testQuantityNormalizedIndexedSearchParam_searchAndReindex_searchParamUpdatedCorrectly(boolean theIsIndexStorageOptimized) {
+ // setup
+ myStorageSettings.setIndexStorageOptimized(theIsIndexStorageOptimized);
+ myStorageSettings.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_STORAGE_SUPPORTED);
+ Substance res = new Substance();
+ res.addInstance().getQuantity().setSystem(UcumServiceUtil.UCUM_CODESYSTEM_URL).setCode("m").setValue(123);
+ IIdType id = mySubstanceDao.create(res, mySrd).getId().toUnqualifiedVersionless();
+
+ QuantityParam quantityParam = new QuantityParam(null, 123, UcumServiceUtil.UCUM_CODESYSTEM_URL, "m");
+ validateAndReindex(theIsIndexStorageOptimized, mySubstanceDao, myResourceIndexedSearchParamQuantityNormalizedDao,
+ id, Substance.SP_QUANTITY, "Substance", quantityParam, ResourceIndexedSearchParamQuantityNormalized.class);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ public void testStringIndexedSearchParam_searchAndReindex_searchParamUpdatedCorrectly(boolean theIsIndexStorageOptimized) {
+ // setup
+ myStorageSettings.setIndexStorageOptimized(theIsIndexStorageOptimized);
+ Patient p = new Patient();
+ p.addAddress().addLine("123 Main Street");
+ IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
+
+ validateAndReindex(theIsIndexStorageOptimized, myPatientDao, myResourceIndexedSearchParamStringDao, id,
+ Patient.SP_ADDRESS, "Patient", new StringParam("123 Main Street"), ResourceIndexedSearchParamString.class);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ public void testTokenIndexedSearchParam_searchAndReindex_searchParamUpdatedCorrectly(boolean theIsIndexStorageOptimized) {
+ // setup
+ myStorageSettings.setIndexStorageOptimized(theIsIndexStorageOptimized);
+ Observation observation = new Observation();
+ observation.setStatus(Observation.ObservationStatus.FINAL);
+ IIdType id = myObservationDao.create(observation, mySrd).getId().toUnqualifiedVersionless();
+
+ validateAndReindex(theIsIndexStorageOptimized, myObservationDao, myResourceIndexedSearchParamTokenDao, id,
+ Observation.SP_STATUS, "Observation", new TokenParam("final"), ResourceIndexedSearchParamToken.class);
+ }
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ public void testUriIndexedSearchParam_searchAndReindex_searchParamUpdatedCorrectly(boolean theIsIndexStorageOptimized) {
+ // setup
+ myStorageSettings.setIndexStorageOptimized(theIsIndexStorageOptimized);
+ ValueSet valueSet = new ValueSet();
+ valueSet.setUrl("http://vs");
+ IIdType id = myValueSetDao.create(valueSet, mySrd).getId().toUnqualifiedVersionless();
+
+ validateAndReindex(theIsIndexStorageOptimized, myValueSetDao, myResourceIndexedSearchParamUriDao, id,
+ ValueSet.SP_URL, "ValueSet", new UriParam("http://vs"), ResourceIndexedSearchParamUri.class);
+ }
+
+ @ParameterizedTest
+ @CsvSource({
+ "false, false, false",
+ "false, false, true",
+ "false, true, false",
+ "true, false, false",
+ "true, false, true",
+ "true, true, false"})
+ public void testValidateConfiguration_withCorrectConfiguration_doesNotThrowException(boolean thePartitioningEnabled,
+ boolean theIsIncludePartitionInSearchHashes,
+ boolean theIsIndexStorageOptimized) {
+ myPartitionSettings.setPartitioningEnabled(thePartitioningEnabled);
+ myPartitionSettings.setIncludePartitionInSearchHashes(theIsIncludePartitionInSearchHashes);
+ myStorageSettings.setIndexStorageOptimized(theIsIndexStorageOptimized);
+
+ assertDoesNotThrow(() -> mySearchConfig.validateConfiguration());
+ }
+
+ @Test
+ public void testValidateConfiguration_withInCorrectConfiguration_throwsException() {
+ myPartitionSettings.setIncludePartitionInSearchHashes(true);
+ myPartitionSettings.setPartitioningEnabled(true);
+ myStorageSettings.setIndexStorageOptimized(true);
+
+ try {
+ mySearchConfig.validateConfiguration();
+ fail();
+ } catch (ConfigurationException e) {
+ assertEquals(Msg.code(2525) + "Incorrect configuration. "
+ + "StorageSettings#isIndexStorageOptimized and PartitionSettings.isIncludePartitionInSearchHashes "
+ + "cannot be enabled at the same time.", e.getMessage());
+ }
+ }
+
+ private void validateAndReindex(boolean theIsIndexStorageOptimized, IFhirResourceDao<? extends IBaseResource> theResourceDao,
+ JpaRepository<? extends BaseResourceIndexedSearchParam, Long> theIndexedSpRepository, IIdType theId,
+ String theSearchParam, String theResourceType, BaseParam theParamValue,
+ Class<? extends BaseResourceIndexedSearchParam> theIndexedSearchParamClass) {
+ // validate
+ validateSearchContainsResource(theResourceDao, theId, theSearchParam, theParamValue);
+ validateSearchParams(theIndexedSpRepository, theId, theSearchParam, theResourceType, theIndexedSearchParamClass);
+
+ // switch on/off storage optimization and run $reindex
+ myStorageSettings.setIndexStorageOptimized(!theIsIndexStorageOptimized);
+ executeReindex(theResourceType + "?");
+
+ // validate again
+ validateSearchContainsResource(theResourceDao, theId, theSearchParam, theParamValue);
+ validateSearchParams(theIndexedSpRepository, theId, theSearchParam, theResourceType, theIndexedSearchParamClass);
+ }
+
+ private void validateSearchParams(JpaRepository<? extends BaseResourceIndexedSearchParam, Long> theIndexedSpRepository,
+ IIdType theId, String theSearchParam, String theResourceType,
+ Class<? extends BaseResourceIndexedSearchParam> theIndexedSearchParamClass) {
+ List<? extends BaseResourceIndexedSearchParam> repositorySearchParams =
+ getAndValidateIndexedSearchParamsRepository(theIndexedSpRepository, theId, theSearchParam, theResourceType);
+
+ long hash = SearchParamHash.hashSearchParam(new PartitionSettings(), null, theResourceType, theSearchParam);
+ if (myStorageSettings.isIndexStorageOptimized()) {
+ // validated sp_name, res_type, sp_updated columns are null in DB
+ runInTransaction(() -> {
+ List<?> results = myEntityManager.createQuery("SELECT i FROM " + theIndexedSearchParamClass.getSimpleName() +
+ " i WHERE i.myResourcePid = " + theId.getIdPartAsLong() + " AND i.myResourceType IS NULL " +
+ "AND i.myParamName IS NULL AND i.myUpdated IS NULL AND i.myHashIdentity = " + hash, theIndexedSearchParamClass).getResultList();
+ assertFalse(results.isEmpty());
+ assertEquals(repositorySearchParams.size(), results.size());
+ });
+ } else {
+ // validated sp_name, res_type, sp_updated columns are not null in DB
+ runInTransaction(() -> {
+ List<?> results = myEntityManager.createQuery("SELECT i FROM " + theIndexedSearchParamClass.getSimpleName() +
+ " i WHERE i.myResourcePid = " + theId.getIdPartAsLong() + " AND i.myResourceType = '" + theResourceType +
+ "' AND i.myParamName = '" + theSearchParam + "' AND i.myUpdated IS NOT NULL AND i.myHashIdentity = " + hash,
+ theIndexedSearchParamClass).getResultList();
+ assertFalse(results.isEmpty());
+ assertEquals(repositorySearchParams.size(), results.size());
+ });
+ }
+ }
+
+ private List<? extends BaseResourceIndexedSearchParam> getAndValidateIndexedSearchParamsRepository(
+ JpaRepository<? extends BaseResourceIndexedSearchParam, Long> theIndexedSpRepository,
+ IIdType theId, String theSearchParam, String theResourceType) {
+
+ List<? extends BaseResourceIndexedSearchParam> repositorySearchParams = theIndexedSpRepository.findAll()
+ .stream()
+ .filter(sp -> sp.getResourcePid().equals(theId.getIdPartAsLong()))
+ .filter(sp -> theSearchParam.equals(sp.getParamName()))
+ .toList();
+ assertFalse(repositorySearchParams.isEmpty());
+
+ repositorySearchParams.forEach(sp -> {
+ assertEquals(theResourceType, sp.getResourceType());
+ if (myStorageSettings.isIndexStorageOptimized()) {
+ assertNull(sp.getUpdated());
+ } else {
+ assertNotNull(sp.getUpdated());
+ }
+ });
+
+ return repositorySearchParams;
+ }
+
+ private void validateSearchContainsResource(IFhirResourceDao<? extends IBaseResource> theResourceDao,
+ IIdType theId,
+ String theSearchParam,
+ BaseParam theParamValue) {
+ SearchParameterMap searchParameterMap = new SearchParameterMap()
+ .setLoadSynchronous(true)
+ .add(theSearchParam, theParamValue);
+ List<IIdType> listIds = toUnqualifiedVersionlessIds(theResourceDao.search(searchParameterMap));
+
+ assertTrue(listIds.contains(theId));
+ }
+
+ private void executeReindex(String... theUrls) {
+ ReindexJobParameters parameters = new ReindexJobParameters();
+ for (String url : theUrls) {
+ parameters.addUrl(url);
+ }
+ JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
+ startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX);
+ startRequest.setParameters(parameters);
+ Batch2JobStartResponse res = myJobCoordinator.startInstance(mySrd, startRequest);
+ ourLog.info("Started reindex job with id {}", res.getInstanceId());
+ myBatch2JobHelper.awaitJobCompletion(res);
+ }
+
+ // Additional existing tests with enabled IndexStorageOptimized
+ @Nested
+ public class IndexStorageOptimizedReindexStepTest extends ReindexStepTest {
+ @BeforeEach
+ void setUp() {
+ myStorageSettings.setIndexStorageOptimized(true);
+ }
+ }
+
+ @Nested
+ public class IndexStorageOptimizedPartitioningSqlR4Test extends PartitioningSqlR4Test {
+ @BeforeEach
+ void setUp() {
+ myStorageSettings.setIndexStorageOptimized(true);
+ }
+ }
+
+ @Nested
+ public class IndexStorageOptimizedFhirResourceDaoR4SearchMissingTest extends FhirResourceDaoR4SearchMissingTest {
+ @BeforeEach
+ void setUp() {
+ myStorageSettings.setIndexStorageOptimized(true);
+ }
+ }
+
+ @Nested
+ public class IndexStorageOptimizedFhirResourceDaoR4QueryCountTest extends FhirResourceDaoR4QueryCountTest {
+ @BeforeEach
+ void setUp() {
+ myStorageSettings.setIndexStorageOptimized(true);
+ }
+ }
+
+ @Nested
+ public class IndexStorageOptimizedFhirResourceDaoR4SearchNoFtTest extends FhirResourceDaoR4SearchNoFtTest {
+ @BeforeEach
+ void setUp() {
+ myStorageSettings.setIndexStorageOptimized(true);
+ }
+ }
+}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
index a21183839e6..c90a55b210c 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4Test.java
@@ -7155,6 +7155,20 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
return new InstantDt(theDate).getValueAsString();
}
+ @Nested
+ public class IndexStorageOptimizedMissingSearchParameterTests extends MissingSearchParameterTests {
+ @BeforeEach
+ public void init() {
+ super.init();
+ myStorageSettings.setIndexStorageOptimized(true);
+ }
+
+ @AfterEach
+ public void cleanUp() {
+ myStorageSettings.setIndexStorageOptimized(false);
+ }
+ }
+
@Nested
public class MissingSearchParameterTests {
diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5IndexStorageOptimizedTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5IndexStorageOptimizedTest.java
new file mode 100644
index 00000000000..232b13c9745
--- /dev/null
+++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5IndexStorageOptimizedTest.java
@@ -0,0 +1,52 @@
+package ca.uhn.fhir.jpa.dao.r5;
+
+import ca.uhn.fhir.jpa.model.entity.StorageSettings;
+import ca.uhn.fhir.jpa.search.reindex.InstanceReindexServiceImplR5Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Nested;
+
+/**
+ * R5 test cases executed with {@link StorageSettings#isIndexStorageOptimized()} enabled
+ */
+public class FhirResourceDaoR5IndexStorageOptimizedTest {
+
+ @Nested
+ public class IndexStorageOptimizedFhirSystemDaoTransactionR5Test extends FhirSystemDaoTransactionR5Test {
+ @BeforeEach
+ public void setUp() {
+ myStorageSettings.setIndexStorageOptimized(true);
+ }
+
+ @AfterEach
+ public void cleanUp() {
+ myStorageSettings.setIndexStorageOptimized(false);
+ }
+ }
+
+ @Nested
+ public class IndexStorageOptimizedInstanceReindexServiceImplR5Test extends InstanceReindexServiceImplR5Test {
+ @BeforeEach
+ public void setUp() {
+ myStorageSettings.setIndexStorageOptimized(true);
+ }
+
+ @AfterEach
+ public void cleanUp() {
+ myStorageSettings.setIndexStorageOptimized(false);
+ }
+ }
+
+ @Nested
+ public class IndexStorageOptimizedUpliftedRefchainsAndChainedSortingR5Test extends UpliftedRefchainsAndChainedSortingR5Test {
+ @BeforeEach
+ public void setUp() {
+ myStorageSettings.setIndexStorageOptimized(true);
+ }
+
+ @AfterEach
+ public void cleanUp() {
+ myStorageSettings.setIndexStorageOptimized(false);
+ }
+ }
+}
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/IndexedSearchParam.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/IndexedSearchParam.java
new file mode 100644
index 00000000000..a4513480c88
--- /dev/null
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/IndexedSearchParam.java
@@ -0,0 +1,42 @@
+/*-
+ * #%L
+ * HAPI FHIR - Server Framework
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.rest.server.util;
+
+/**
+ * Simplified model of indexed search parameter
+ */
+public class IndexedSearchParam {
+
+ private final String myParameterName;
+ private final String myResourceType;
+
+ public IndexedSearchParam(String theParameterName, String theResourceType) {
+ this.myParameterName = theParameterName;
+ this.myResourceType = theResourceType;
+ }
+
+ public String getParameterName() {
+ return myParameterName;
+ }
+
+ public String getResourceType() {
+ return myResourceType;
+ }
+}
From f767058239c8f331d7bf2bbea69e58e9c78b1652 Mon Sep 17 00:00:00 2001
From: Jens Kristian Villadsen
Date: Sat, 22 Jun 2024 01:46:39 +0200
Subject: [PATCH 08/19] Subscription fhirpath criteria (#5975)
* Added fhirpath-criteria evaluation
* Added test for valid fhirpath that does not evaluate to boolean
* Added resolving the variables %current and %previous
* Added test using only FP criteria
* Added test cases for valid FhirPath expressions that return non-booleans
* Added use of central cache
* Added more elaborate tests for non-sunshine scenarios
* Added changelog
* CheckStyle'd errorcode added.
* Added spotless formatting and converted FhirPathR5 expression to be Android compatible
* Applied more spotless
---
.../fhirpath/IFhirPathEvaluationContext.java | 13 +
...031-add-support-for-fhirpath-criteria.yaml | 4 +
.../jpa/topic/SubscriptionTopicConfig.java | 6 +-
.../jpa/topic/SubscriptionTopicMatcher.java | 10 +-
.../SubscriptionTopicMatchingSubscriber.java | 9 +-
.../jpa/topic/SubscriptionTriggerMatcher.java | 106 ++++++-
.../topic/SubscriptionTriggerMatcherTest.java | 261 +++++++++++++++++-
.../uhn/fhir/jpa/util/MemoryCacheService.java | 2 +
.../hl7/fhir/r5/hapi/fhirpath/FhirPathR5.java | 8 +-
9 files changed, 399 insertions(+), 20 deletions(-)
create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6031-add-support-for-fhirpath-criteria.yaml
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/IFhirPathEvaluationContext.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/IFhirPathEvaluationContext.java
index e08e2b1ea4b..b492a042817 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/IFhirPathEvaluationContext.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/IFhirPathEvaluationContext.java
@@ -24,6 +24,8 @@ import jakarta.annotation.Nullable;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IIdType;
+import java.util.List;
+
public interface IFhirPathEvaluationContext {
/**
@@ -36,4 +38,15 @@ public interface IFhirPathEvaluationContext {
default IBase resolveReference(@Nonnull IIdType theReference, @Nullable IBase theContext) {
return null;
}
+
+ /**
+ * Resolves FHIRPath constant(s) referenced by name in an expression (e.g. %current, %previous).
+ * @param appContext the application context object supplied to the FHIRPath engine, may be null
+ * @param name The name of the constant(s) to be resolved
+ * @param beforeContext whether resolution is occurring before the evaluation context is established
+ * @return the resolved value(s), or null if the constant is not recognized by this context
+ */
+ default List resolveConstant(Object appContext, String name, boolean beforeContext) {
+ return null;
+ }
}
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6031-add-support-for-fhirpath-criteria.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6031-add-support-for-fhirpath-criteria.yaml
new file mode 100644
index 00000000000..6df40dfd7ad
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6031-add-support-for-fhirpath-criteria.yaml
@@ -0,0 +1,4 @@
+---
+type: add
+issue: 6031
+title: "Subscriptions now support the evaluation of FhirPath criteria and the use of the variables %current and %previous. Thanks to Jens Villadsen (@jkiddo) for the contribution!"
diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicConfig.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicConfig.java
index 802f43a47e5..3b5225246c0 100644
--- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicConfig.java
+++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicConfig.java
@@ -24,6 +24,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.searchparam.matcher.SearchParamMatcher;
import ca.uhn.fhir.jpa.subscription.config.SubscriptionConfig;
import ca.uhn.fhir.jpa.subscription.submit.interceptor.SubscriptionQueryValidator;
+import ca.uhn.fhir.jpa.util.MemoryCacheService;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@@ -33,11 +34,12 @@ import org.springframework.context.annotation.Lazy;
@Import(SubscriptionConfig.class)
public class SubscriptionTopicConfig {
@Bean
- SubscriptionTopicMatchingSubscriber subscriptionTopicMatchingSubscriber(FhirContext theFhirContext) {
+ SubscriptionTopicMatchingSubscriber subscriptionTopicMatchingSubscriber(
+ FhirContext theFhirContext, MemoryCacheService memoryCacheService) {
switch (theFhirContext.getVersion().getVersion()) {
case R5:
case R4B:
- return new SubscriptionTopicMatchingSubscriber(theFhirContext);
+ return new SubscriptionTopicMatchingSubscriber(theFhirContext, memoryCacheService);
default:
return null;
}
diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicMatcher.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicMatcher.java
index 93bfe7c0346..77ba9bbd8bb 100644
--- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicMatcher.java
+++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicMatcher.java
@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.topic;
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
+import ca.uhn.fhir.jpa.util.MemoryCacheService;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r5.model.SubscriptionTopic;
@@ -29,10 +30,15 @@ import java.util.List;
public class SubscriptionTopicMatcher {
private final SubscriptionTopicSupport mySubscriptionTopicSupport;
private final SubscriptionTopic myTopic;
+ private final MemoryCacheService myMemoryCacheService;
- public SubscriptionTopicMatcher(SubscriptionTopicSupport theSubscriptionTopicSupport, SubscriptionTopic theTopic) {
+ public SubscriptionTopicMatcher(
+ SubscriptionTopicSupport theSubscriptionTopicSupport,
+ SubscriptionTopic theTopic,
+ MemoryCacheService memoryCacheService) {
mySubscriptionTopicSupport = theSubscriptionTopicSupport;
myTopic = theTopic;
+ myMemoryCacheService = memoryCacheService;
}
public InMemoryMatchResult match(ResourceModifiedMessage theMsg) {
@@ -43,7 +49,7 @@ public class SubscriptionTopicMatcher {
for (SubscriptionTopic.SubscriptionTopicResourceTriggerComponent next : triggers) {
if (resourceName.equals(next.getResource())) {
SubscriptionTriggerMatcher matcher =
- new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, theMsg, next);
+ new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, theMsg, next, myMemoryCacheService);
InMemoryMatchResult result = matcher.match();
if (result.matched()) {
// as soon as one trigger matches, we're done
diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicMatchingSubscriber.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicMatchingSubscriber.java
index c1776bf2983..67151db08ec 100644
--- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicMatchingSubscriber.java
+++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicMatchingSubscriber.java
@@ -27,6 +27,7 @@ import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedJsonMessage;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
import ca.uhn.fhir.jpa.topic.filter.InMemoryTopicFilterMatcher;
+import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.subscription.api.IResourceModifiedMessagePersistenceSvc;
import ca.uhn.fhir.util.Logs;
@@ -67,8 +68,11 @@ public class SubscriptionTopicMatchingSubscriber implements MessageHandler {
@Autowired
private IResourceModifiedMessagePersistenceSvc myResourceModifiedMessagePersistenceSvc;
- public SubscriptionTopicMatchingSubscriber(FhirContext theFhirContext) {
+ private MemoryCacheService myMemoryCacheService;
+
+ public SubscriptionTopicMatchingSubscriber(FhirContext theFhirContext, MemoryCacheService memoryCacheService) {
myFhirContext = theFhirContext;
+ this.myMemoryCacheService = memoryCacheService;
}
@Override
@@ -110,7 +114,8 @@ public class SubscriptionTopicMatchingSubscriber implements MessageHandler {
Collection topics = mySubscriptionTopicRegistry.getAll();
for (SubscriptionTopic topic : topics) {
- SubscriptionTopicMatcher matcher = new SubscriptionTopicMatcher(mySubscriptionTopicSupport, topic);
+ SubscriptionTopicMatcher matcher =
+ new SubscriptionTopicMatcher(mySubscriptionTopicSupport, topic, myMemoryCacheService);
InMemoryMatchResult result = matcher.match(theMsg);
if (result.matched()) {
int deliveries = deliverToTopicSubscriptions(theMsg, topic, result);
diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTriggerMatcher.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTriggerMatcher.java
index f6cea086ee5..7970e8c13ee 100644
--- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTriggerMatcher.java
+++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTriggerMatcher.java
@@ -19,17 +19,26 @@
*/
package ca.uhn.fhir.jpa.topic;
+import ca.uhn.fhir.fhirpath.IFhirPath;
+import ca.uhn.fhir.fhirpath.IFhirPathEvaluationContext;
+import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
+import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.messaging.BaseResourceMessage;
import ca.uhn.fhir.storage.PreviousVersionReader;
import ca.uhn.fhir.util.Logs;
+import com.google.common.base.Strings;
+import org.hl7.fhir.exceptions.FHIRException;
+import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.r5.model.BooleanType;
import org.hl7.fhir.r5.model.Enumeration;
import org.hl7.fhir.r5.model.SubscriptionTopic;
import org.slf4j.Logger;
+import org.slf4j.helpers.MessageFormatter;
import java.util.List;
import java.util.Optional;
@@ -45,11 +54,13 @@ public class SubscriptionTriggerMatcher {
private final IFhirResourceDao myDao;
private final PreviousVersionReader myPreviousVersionReader;
private final SystemRequestDetails mySrd;
+ private final MemoryCacheService myMemoryCacheService;
public SubscriptionTriggerMatcher(
SubscriptionTopicSupport theSubscriptionTopicSupport,
ResourceModifiedMessage theMsg,
- SubscriptionTopic.SubscriptionTopicResourceTriggerComponent theTrigger) {
+ SubscriptionTopic.SubscriptionTopicResourceTriggerComponent theTrigger,
+ MemoryCacheService theMemoryCacheService) {
mySubscriptionTopicSupport = theSubscriptionTopicSupport;
myOperation = theMsg.getOperationType();
myResource = theMsg.getPayload(theSubscriptionTopicSupport.getFhirContext());
@@ -58,6 +69,7 @@ public class SubscriptionTriggerMatcher {
myTrigger = theTrigger;
myPreviousVersionReader = new PreviousVersionReader(myDao);
mySrd = new SystemRequestDetails();
+ myMemoryCacheService = theMemoryCacheService;
}
public InMemoryMatchResult match() {
@@ -66,21 +78,22 @@ public class SubscriptionTriggerMatcher {
if (SubscriptionTopicUtil.matches(myOperation, supportedInteractions)) {
SubscriptionTopic.SubscriptionTopicResourceTriggerQueryCriteriaComponent queryCriteria =
myTrigger.getQueryCriteria();
- InMemoryMatchResult result = match(queryCriteria);
- if (result.matched()) {
- return result;
- }
+ String fhirPathCriteria = myTrigger.getFhirPathCriteria();
+ return match(queryCriteria, fhirPathCriteria);
}
return InMemoryMatchResult.noMatch();
}
private InMemoryMatchResult match(
- SubscriptionTopic.SubscriptionTopicResourceTriggerQueryCriteriaComponent theQueryCriteria) {
+ SubscriptionTopic.SubscriptionTopicResourceTriggerQueryCriteriaComponent theQueryCriteria,
+ String theFhirPathCriteria) {
String previousCriteria = theQueryCriteria.getPrevious();
String currentCriteria = theQueryCriteria.getCurrent();
InMemoryMatchResult previousMatches = InMemoryMatchResult.fromBoolean(previousCriteria == null);
InMemoryMatchResult currentMatches = InMemoryMatchResult.fromBoolean(currentCriteria == null);
+ InMemoryMatchResult fhirPathCriteriaEvaluationResult = evaluateFhirPathCriteria(theFhirPathCriteria);
+
// WIP STR5 implement fhirPathCriteria per https://build.fhir.org/subscriptiontopic.html#fhirpath-criteria
if (currentCriteria != null) {
currentMatches = matchResource(myResource, currentCriteria);
@@ -105,12 +118,89 @@ public class SubscriptionTriggerMatcher {
}
// WIP STR5 implement resultForCreate and resultForDelete
if (theQueryCriteria.getRequireBoth()) {
- return InMemoryMatchResult.and(previousMatches, currentMatches);
+ return InMemoryMatchResult.and(
+ InMemoryMatchResult.and(previousMatches, currentMatches), fhirPathCriteriaEvaluationResult);
} else {
- return InMemoryMatchResult.or(previousMatches, currentMatches);
+ return InMemoryMatchResult.and(
+ InMemoryMatchResult.or(previousMatches, currentMatches), fhirPathCriteriaEvaluationResult);
}
}
+ private InMemoryMatchResult evaluateFhirPathCriteria(String theFhirPathCriteria) {
+ if (!Strings.isNullOrEmpty(theFhirPathCriteria)) {
+ IFhirPath fhirPathEngine =
+ mySubscriptionTopicSupport.getFhirContext().newFhirPath();
+ fhirPathEngine.setEvaluationContext(new IFhirPathEvaluationContext() {
+
+ @Override
+ public List resolveConstant(Object appContext, String name, boolean beforeContext) {
+ if ("current".equalsIgnoreCase(name)) return List.of(myResource);
+
+ if ("previous".equalsIgnoreCase(name)) {
+ Optional previousResource = myPreviousVersionReader.readPreviousVersion(myResource);
+ if (previousResource.isPresent()) return List.of((IBase) previousResource.get());
+ }
+
+ return null;
+ }
+ });
+ try {
+ IFhirPath.IParsedExpression expression = myMemoryCacheService.get(
+ MemoryCacheService.CacheEnum.FHIRPATH_EXPRESSION, theFhirPathCriteria, exp -> {
+ try {
+ return fhirPathEngine.parse(exp);
+ } catch (FHIRException e) {
+ throw e;
+ } catch (Exception e) {
+ throw new RuntimeException(Msg.code(2534) + e.getMessage(), e);
+ }
+ });
+
+ List result = fhirPathEngine.evaluate(myResource, expression, IBase.class);
+
+ return parseResult(theFhirPathCriteria, result);
+
+ } catch (FHIRException fhirException) {
+ ourLog.warn(
+ "Subscription topic {} has a fhirPathCriteria that is not valid: {}",
+ myTrigger.getId(),
+ theFhirPathCriteria,
+ fhirException);
+ return InMemoryMatchResult.unsupportedFromReason(fhirException.getMessage());
+ }
+ }
+ return InMemoryMatchResult.fromBoolean(true);
+ }
+
+ private InMemoryMatchResult parseResult(String theFhirPathCriteria, List result) {
+ if (result == null) {
+ return InMemoryMatchResult.unsupportedFromReason(MessageFormatter.format(
+ "FhirPath evaluation criteria '{}' from Subscription topic: '{}' resulted in null results.",
+ theFhirPathCriteria,
+ myTrigger.getId())
+ .getMessage());
+ }
+
+ if (result.size() != 1) {
+ return InMemoryMatchResult.unsupportedFromReason(MessageFormatter.arrayFormat(
+ "FhirPath evaluation criteria '{}' from Subscription topic: '{}' resulted in '{}' results. Expected 1.",
+ new String[] {theFhirPathCriteria, myTrigger.getId(), String.valueOf(result.size())})
+ .getMessage());
+ }
+
+ if (!(result.get(0) instanceof BooleanType)) {
+ return InMemoryMatchResult.unsupportedFromReason(MessageFormatter.arrayFormat(
+ "FhirPath evaluation criteria '{}' from Subscription topic: '{}' resulted in a non-boolean result: '{}'",
+ new String[] {
+ theFhirPathCriteria,
+ myTrigger.getId(),
+ result.get(0).getClass().getName()
+ })
+ .getMessage());
+ }
+ return InMemoryMatchResult.fromBoolean(((BooleanType) result.get(0)).booleanValue());
+ }
+
private InMemoryMatchResult matchResource(IBaseResource theResource, String theCriteria) {
InMemoryMatchResult result =
mySubscriptionTopicSupport.getSearchParamMatcher().match(theCriteria, theResource, mySrd);
diff --git a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/topic/SubscriptionTriggerMatcherTest.java b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/topic/SubscriptionTriggerMatcherTest.java
index ae47a026bb7..27a1f147f64 100644
--- a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/topic/SubscriptionTriggerMatcherTest.java
+++ b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/topic/SubscriptionTriggerMatcherTest.java
@@ -1,12 +1,15 @@
package ca.uhn.fhir.jpa.topic;
import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
import ca.uhn.fhir.jpa.searchparam.matcher.SearchParamMatcher;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
+import ca.uhn.fhir.jpa.util.MemoryCacheService;
import org.hl7.fhir.r5.model.Encounter;
+import org.hl7.fhir.r5.model.Enumerations;
import org.hl7.fhir.r5.model.IdType;
import org.hl7.fhir.r5.model.SubscriptionTopic;
import org.junit.jupiter.api.BeforeEach;
@@ -15,7 +18,10 @@ import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
@@ -30,11 +36,14 @@ class SubscriptionTriggerMatcherTest {
@Mock
SearchParamMatcher mySearchParamMatcher;
+ MemoryCacheService myMemoryCacheService;
+
private SubscriptionTopicSupport mySubscriptionTopicSupport;
private Encounter myEncounter;
@BeforeEach
public void before() {
+ myMemoryCacheService = new MemoryCacheService(new JpaStorageSettings());
mySubscriptionTopicSupport = new SubscriptionTopicSupport(ourFhirContext, myDaoRegistry, mySearchParamMatcher);
myEncounter = new Encounter();
myEncounter.setIdElement(new IdType("Encounter", "123", "2"));
@@ -48,7 +57,7 @@ class SubscriptionTriggerMatcherTest {
SubscriptionTopic.SubscriptionTopicResourceTriggerComponent trigger = new SubscriptionTopic.SubscriptionTopicResourceTriggerComponent();
// run
- SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger);
+ SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger, myMemoryCacheService);
InMemoryMatchResult result = svc.match();
// verify
@@ -65,7 +74,7 @@ class SubscriptionTriggerMatcherTest {
trigger.addSupportedInteraction(SubscriptionTopic.InteractionTrigger.CREATE);
// run
- SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger);
+ SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger, myMemoryCacheService);
InMemoryMatchResult result = svc.match();
// verify
@@ -82,7 +91,7 @@ class SubscriptionTriggerMatcherTest {
trigger.addSupportedInteraction(SubscriptionTopic.InteractionTrigger.UPDATE);
// run
- SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger);
+ SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger, myMemoryCacheService);
InMemoryMatchResult result = svc.match();
// verify
@@ -99,7 +108,7 @@ class SubscriptionTriggerMatcherTest {
trigger.addSupportedInteraction(SubscriptionTopic.InteractionTrigger.UPDATE);
// run
- SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger);
+ SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger, myMemoryCacheService);
InMemoryMatchResult result = svc.match();
// verify
@@ -124,11 +133,253 @@ class SubscriptionTriggerMatcherTest {
when(mySearchParamMatcher.match(any(), any(), any())).thenReturn(InMemoryMatchResult.successfulMatch());
// run
- SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger);
+ SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger, myMemoryCacheService);
InMemoryMatchResult result = svc.match();
// verify
assertTrue(result.matched());
}
+ @Test
+ public void testFalseFhirPathCriteriaEvaluation() {
+ ResourceModifiedMessage msg = new ResourceModifiedMessage(ourFhirContext, myEncounter, ResourceModifiedMessage.OperationTypeEnum.UPDATE);
+
+ // setup
+ SubscriptionTopic.SubscriptionTopicResourceTriggerComponent trigger = new SubscriptionTopic.SubscriptionTopicResourceTriggerComponent();
+ trigger.setResource("Encounter");
+ trigger.addSupportedInteraction(SubscriptionTopic.InteractionTrigger.UPDATE);
+ trigger.setFhirPathCriteria("false");
+
+ // run
+ SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger, myMemoryCacheService);
+ InMemoryMatchResult result = svc.match();
+
+ // verify
+ assertFalse(result.matched());
+ }
+
+ @Test
+ public void testInvalidFhirPathCriteriaEvaluation() {
+ ResourceModifiedMessage msg = new ResourceModifiedMessage(ourFhirContext, myEncounter, ResourceModifiedMessage.OperationTypeEnum.UPDATE);
+
+ // setup
+ SubscriptionTopic.SubscriptionTopicResourceTriggerComponent trigger = new SubscriptionTopic.SubscriptionTopicResourceTriggerComponent();
+ trigger.setResource("Encounter");
+ trigger.addSupportedInteraction(SubscriptionTopic.InteractionTrigger.UPDATE);
+ trigger.setFhirPathCriteria("random text");
+
+ // run
+ SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger, myMemoryCacheService);
+ InMemoryMatchResult result = svc.match();
+
+ // verify
+ assertFalse(result.matched());
+ assertEquals("Error @1, 2: Premature ExpressionNode termination at unexpected token \"text\"", result.getUnsupportedReason());
+ }
+
+ @Test
+ public void testInvalidBooleanOutcomeOfFhirPathCriteriaEvaluation() {
+ ResourceModifiedMessage msg = new ResourceModifiedMessage(ourFhirContext, myEncounter, ResourceModifiedMessage.OperationTypeEnum.UPDATE);
+
+ // setup
+ SubscriptionTopic.SubscriptionTopicResourceTriggerComponent trigger = new SubscriptionTopic.SubscriptionTopicResourceTriggerComponent();
+ trigger.setResource("Encounter");
+ trigger.addSupportedInteraction(SubscriptionTopic.InteractionTrigger.UPDATE);
+ trigger.setFhirPathCriteria("id");
+
+ // run
+ SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger, myMemoryCacheService);
+ InMemoryMatchResult result = svc.match();
+
+ // verify
+ assertFalse(result.matched());
+ }
+
+ @Test
+ public void testValidFhirPathCriteriaEvaluation() {
+ ResourceModifiedMessage msg = new ResourceModifiedMessage(ourFhirContext, myEncounter, ResourceModifiedMessage.OperationTypeEnum.UPDATE);
+
+ // setup
+ SubscriptionTopic.SubscriptionTopicResourceTriggerComponent trigger = new SubscriptionTopic.SubscriptionTopicResourceTriggerComponent();
+ trigger.setResource("Encounter");
+ trigger.addSupportedInteraction(SubscriptionTopic.InteractionTrigger.UPDATE);
+ trigger.setFhirPathCriteria("id = " + myEncounter.getIdElement().getIdPart());
+
+ // run
+ SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger, myMemoryCacheService);
+ InMemoryMatchResult result = svc.match();
+
+ // verify
+ assertTrue(result.matched());
+ }
+
+ @Test
+ public void testValidFhirPathCriteriaEvaluationUsingCurrent() {
+ ResourceModifiedMessage msg = new ResourceModifiedMessage(ourFhirContext, myEncounter, ResourceModifiedMessage.OperationTypeEnum.UPDATE);
+
+ // setup
+ SubscriptionTopic.SubscriptionTopicResourceTriggerComponent trigger = new SubscriptionTopic.SubscriptionTopicResourceTriggerComponent();
+ trigger.setResource("Encounter");
+ trigger.addSupportedInteraction(SubscriptionTopic.InteractionTrigger.UPDATE);
+ trigger.setFhirPathCriteria("%current.id = " + myEncounter.getIdElement().getIdPart());
+
+ // run
+ SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger, myMemoryCacheService);
+ InMemoryMatchResult result = svc.match();
+
+ // verify
+ assertTrue(result.matched());
+ }
+
+ @Test
+ public void testValidFhirPathCriteriaEvaluationReturningNonBoolean() {
+ ResourceModifiedMessage msg = new ResourceModifiedMessage(ourFhirContext, myEncounter, ResourceModifiedMessage.OperationTypeEnum.UPDATE);
+
+ // setup
+ SubscriptionTopic.SubscriptionTopicResourceTriggerComponent trigger = new SubscriptionTopic.SubscriptionTopicResourceTriggerComponent();
+ trigger.setId("1");
+ trigger.setResource("Encounter");
+ trigger.addSupportedInteraction(SubscriptionTopic.InteractionTrigger.UPDATE);
+ trigger.setFhirPathCriteria("%current.id");
+
+ // run
+ SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger, myMemoryCacheService);
+ InMemoryMatchResult result = svc.match();
+
+ // verify
+ assertFalse(result.matched());
+ assertEquals("FhirPath evaluation criteria '%current.id' from Subscription topic: '1' resulted in a non-boolean result: 'org.hl7.fhir.r5.model.IdType'", result.getUnsupportedReason());
+ }
+
+ @Test
+ public void testValidFhirPathReturningCollection() {
+ ResourceModifiedMessage msg = new ResourceModifiedMessage(ourFhirContext, myEncounter, ResourceModifiedMessage.OperationTypeEnum.UPDATE);
+
+ // setup
+ SubscriptionTopic.SubscriptionTopicResourceTriggerComponent trigger = new SubscriptionTopic.SubscriptionTopicResourceTriggerComponent();
+ trigger.setId("1");
+ trigger.setResource("Encounter");
+ trigger.addSupportedInteraction(SubscriptionTopic.InteractionTrigger.UPDATE);
+ trigger.setFhirPathCriteria("%current | %previous");
+
+ IFhirResourceDao mockEncounterDao = mock(IFhirResourceDao.class);
+ when(myDaoRegistry.getResourceDao("Encounter")).thenReturn(mockEncounterDao);
+ Encounter encounterPreviousVersion = new Encounter();
+ when(mockEncounterDao.read(any(), any(), eq(false))).thenReturn(encounterPreviousVersion);
+
+ // run
+ SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger, myMemoryCacheService);
+ InMemoryMatchResult result = svc.match();
+
+ // verify
+ assertFalse(result.matched());
+ assertEquals("FhirPath evaluation criteria '%current | %previous' from Subscription topic: '1' resulted in '2' results. Expected 1.", result.getUnsupportedReason());
+ }
+
+ @Test
+ public void testUpdateWithPrevCriteriaMatchAndFailingFhirPathCriteria() {
+ ResourceModifiedMessage msg = new ResourceModifiedMessage(ourFhirContext, myEncounter, ResourceModifiedMessage.OperationTypeEnum.UPDATE);
+
+ // setup
+ SubscriptionTopic.SubscriptionTopicResourceTriggerComponent trigger = new SubscriptionTopic.SubscriptionTopicResourceTriggerComponent();
+ trigger.setResource("Encounter");
+ trigger.addSupportedInteraction(SubscriptionTopic.InteractionTrigger.UPDATE);
+ trigger.getQueryCriteria().setPrevious("Encounter?status=in-progress");
+ trigger.setFhirPathCriteria("random text");
+
+
+ IFhirResourceDao mockEncounterDao = mock(IFhirResourceDao.class);
+ when(myDaoRegistry.getResourceDao("Encounter")).thenReturn(mockEncounterDao);
+ Encounter encounterPreviousVersion = new Encounter();
+ when(mockEncounterDao.read(any(), any(), eq(false))).thenReturn(encounterPreviousVersion);
+ when(mySearchParamMatcher.match(any(), any(), any())).thenReturn(InMemoryMatchResult.successfulMatch());
+
+ // run
+ SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger, myMemoryCacheService);
+ InMemoryMatchResult result = svc.match();
+
+ // verify
+ assertFalse(result.matched());
+ assertEquals("Error @1, 2: Premature ExpressionNode termination at unexpected token \"text\"", result.getUnsupportedReason());
+ }
+
+ @Test
+ public void testUpdateWithPrevCriteriaMatchAndFhirPathCriteriaUsingPreviousVersion() {
+ myEncounter.setStatus(Enumerations.EncounterStatus.INPROGRESS);
+ ResourceModifiedMessage msg = new ResourceModifiedMessage(ourFhirContext, myEncounter, ResourceModifiedMessage.OperationTypeEnum.UPDATE);
+
+ // setup
+ SubscriptionTopic.SubscriptionTopicResourceTriggerComponent trigger = new SubscriptionTopic.SubscriptionTopicResourceTriggerComponent();
+ trigger.setResource("Encounter");
+ trigger.addSupportedInteraction(SubscriptionTopic.InteractionTrigger.UPDATE);
+ trigger.getQueryCriteria().setPrevious("Encounter?status=in-progress");
+ trigger.setFhirPathCriteria("%current.status='in-progress' and %previous.status.exists().not()");
+
+
+ IFhirResourceDao mockEncounterDao = mock(IFhirResourceDao.class);
+ when(myDaoRegistry.getResourceDao("Encounter")).thenReturn(mockEncounterDao);
+ Encounter encounterPreviousVersion = new Encounter();
+ when(mockEncounterDao.read(any(), any(), eq(false))).thenReturn(encounterPreviousVersion);
+ when(mySearchParamMatcher.match(any(), any(), any())).thenReturn(InMemoryMatchResult.successfulMatch());
+
+ // run
+ SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger, myMemoryCacheService);
+ InMemoryMatchResult result = svc.match();
+
+ // verify
+ assertTrue(result.matched());
+ }
+
+ @Test
+ public void testUpdateOnlyFhirPathCriteriaUsingPreviousVersion() {
+ myEncounter.setStatus(Enumerations.EncounterStatus.INPROGRESS);
+ ResourceModifiedMessage msg = new ResourceModifiedMessage(ourFhirContext, myEncounter, ResourceModifiedMessage.OperationTypeEnum.UPDATE);
+
+ // setup
+ SubscriptionTopic.SubscriptionTopicResourceTriggerComponent trigger = new SubscriptionTopic.SubscriptionTopicResourceTriggerComponent();
+ trigger.setResource("Encounter");
+ trigger.addSupportedInteraction(SubscriptionTopic.InteractionTrigger.UPDATE);
+ trigger.setFhirPathCriteria("%current.status='in-progress' and %previous.status.exists().not()");
+
+
+ IFhirResourceDao mockEncounterDao = mock(IFhirResourceDao.class);
+ when(myDaoRegistry.getResourceDao("Encounter")).thenReturn(mockEncounterDao);
+ Encounter encounterPreviousVersion = new Encounter();
+ when(mockEncounterDao.read(any(), any(), eq(false))).thenReturn(encounterPreviousVersion);
+
+ // run
+ SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger, myMemoryCacheService);
+ InMemoryMatchResult result = svc.match();
+
+ // verify
+ assertTrue(result.matched());
+ }
+
+ @Test
+ public void testCacheUsage() {
+ myEncounter.setStatus(Enumerations.EncounterStatus.INPROGRESS);
+ ResourceModifiedMessage msg = new ResourceModifiedMessage(ourFhirContext, myEncounter, ResourceModifiedMessage.OperationTypeEnum.UPDATE);
+
+ // setup
+ SubscriptionTopic.SubscriptionTopicResourceTriggerComponent trigger = new SubscriptionTopic.SubscriptionTopicResourceTriggerComponent();
+ trigger.setResource("Encounter");
+ trigger.addSupportedInteraction(SubscriptionTopic.InteractionTrigger.UPDATE);
+ String fhirPathCriteria = "%current.status='in-progress'";
+ trigger.setFhirPathCriteria(fhirPathCriteria);
+
+
+ IFhirResourceDao mockEncounterDao = mock(IFhirResourceDao.class);
+ when(myDaoRegistry.getResourceDao("Encounter")).thenReturn(mockEncounterDao);
+
+ assertNull(myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.FHIRPATH_EXPRESSION, fhirPathCriteria));
+
+ // run
+ SubscriptionTriggerMatcher svc = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, msg, trigger, myMemoryCacheService);
+ InMemoryMatchResult result = svc.match();
+
+
+ // verify
+ assertTrue(result.matched());
+ assertNotNull(myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.FHIRPATH_EXPRESSION, fhirPathCriteria));
+ }
}
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java
index dbe59938f7e..8df91adfa23 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java
@@ -77,6 +77,7 @@ public class MemoryCacheService {
case HISTORY_COUNT:
case TAG_DEFINITION:
case RESOURCE_CONDITIONAL_CREATE_VERSION:
+ case FHIRPATH_EXPRESSION:
default:
timeoutSeconds = SECONDS.convert(1, MINUTES);
maximumSize = 10000;
@@ -193,6 +194,7 @@ public class MemoryCacheService {
TAG_DEFINITION(TagDefinitionCacheKey.class),
RESOURCE_LOOKUP(String.class),
FORCED_ID_TO_PID(String.class),
+ FHIRPATH_EXPRESSION(String.class),
/**
* Key type: {@literal Long}
* Value type: {@literal Optional}
diff --git a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/fhirpath/FhirPathR5.java b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/fhirpath/FhirPathR5.java
index d0d33f403f5..a05a8efc37e 100644
--- a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/fhirpath/FhirPathR5.java
+++ b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/fhirpath/FhirPathR5.java
@@ -19,8 +19,10 @@ import org.hl7.fhir.r5.model.Base;
import org.hl7.fhir.r5.model.IdType;
import org.hl7.fhir.r5.model.ValueSet;
+import java.util.Collections;
import java.util.List;
import java.util.Optional;
+import java.util.stream.Collectors;
public class FhirPathR5 implements IFhirPath {
@@ -99,7 +101,11 @@ public class FhirPathR5 implements IFhirPath {
boolean beforeContext,
boolean explicitConstant)
throws PathEngineException {
- return null;
+
+ return Collections.unmodifiableList(
+ theEvaluationContext.resolveConstant(appContext, name, beforeContext).stream()
+ .map(Base.class::cast)
+ .collect(Collectors.toList()));
}
@Override
From bce031345828a4ab198bdc91b000a219dfd90c3f Mon Sep 17 00:00:00 2001
From: Tadgh
Date: Fri, 21 Jun 2024 17:36:00 -0700
Subject: [PATCH 09/19] 6034 poor index generation on sql server (#6035)
* Changelog
* Remove uniqueness
* spotless
* Add migrations
* spotless
* make some space
* Handle fancier upgrade
* spotless
* Fix up where clause generation to be a bit more readable
* spotless
---
.../7_4_0/6034-sql-server-index-creation.yaml | 4 +++
.../tasks/HapiFhirJpaMigrationTasks.java | 32 +++++++++++++++++++
.../entity/ResourceIndexedSearchParamUri.java | 7 ++--
.../jpa/migrate/taskdef/AddIndexTask.java | 29 +++++++++++++----
.../fhir/jpa/migrate/tasks/api/Builder.java | 25 +++++++++++++++
.../migrate/tasks/api/ColumnAndNullable.java | 22 +++++++++++++
6 files changed, 107 insertions(+), 12 deletions(-)
create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6034-sql-server-index-creation.yaml
create mode 100644 hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/ColumnAndNullable.java
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6034-sql-server-index-creation.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6034-sql-server-index-creation.yaml
new file mode 100644
index 00000000000..3377deb1420
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6034-sql-server-index-creation.yaml
@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 6034
+title: "Two indexes introduced in HAPI-FHIR 6.6.0, `IDX_SP_URI_HASH_IDENTITY_V2` and `IDX_SP_URI_HASH_URI_V2` were previously created as unique indexes. This has caused issues on SQL Server due to the way that a filtered index is created. The unique clause was not necessary to this index, and has been removed."
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
index ae4e9f5f9a0..10b58c19b22 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
@@ -32,6 +32,7 @@ import ca.uhn.fhir.jpa.migrate.taskdef.ForceIdMigrationCopyTask;
import ca.uhn.fhir.jpa.migrate.taskdef.ForceIdMigrationFixTask;
import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
+import ca.uhn.fhir.jpa.migrate.tasks.api.ColumnAndNullable;
import ca.uhn.fhir.jpa.migrate.tasks.api.TaskFlagEnum;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
@@ -348,6 +349,37 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
}
+
+ {
+ // Note that these are recreations of a previous migration from 6.6.0. The original migration had these set
+ // as unique,
+ // which causes SQL Server to create a filtered index. See
+ // https://www.sqlshack.com/introduction-to-sql-server-filtered-indexes/
+ // What this means for hibernate search is that for any column that is nullable, the SQLServerDialect will
+ // omit the whole row from the index if
+ // the value of the nullable column is actually null. Removing the uniqueness constraint works around this
+ // problem.
+ Builder.BuilderWithTableName uriTable = version.onTable("HFJ_SPIDX_URI");
+
+ uriTable.dropIndex("20240620.10", "IDX_SP_URI_HASH_URI_V2");
+ uriTable.dropIndex("20240620.20", "IDX_SP_URI_HASH_IDENTITY_V2");
+
+ uriTable.addIndex("20240620.30", "IDX_SP_URI_HASH_URI_V2")
+ .unique(false)
+ .online(true)
+ .withPossibleNullableColumns(
+ new ColumnAndNullable("HASH_URI", true),
+ new ColumnAndNullable("RES_ID", false),
+ new ColumnAndNullable("PARTITION_ID", true));
+ uriTable.addIndex("20240620.40", "IDX_SP_URI_HASH_IDENTITY_V2")
+ .unique(false)
+ .online(true)
+ .withPossibleNullableColumns(
+ new ColumnAndNullable("HASH_IDENTITY", true),
+ new ColumnAndNullable("SP_URI", true),
+ new ColumnAndNullable("RES_ID", false),
+ new ColumnAndNullable("PARTITION_ID", true));
+ }
}
protected void init720() {
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUri.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUri.java
index 02a5f23a16f..f7fb75439a8 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUri.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUri.java
@@ -54,12 +54,9 @@ import static org.apache.commons.lang3.StringUtils.defaultString;
name = "HFJ_SPIDX_URI",
indexes = {
// for queries
- @Index(name = "IDX_SP_URI_HASH_URI_V2", columnList = "HASH_URI,RES_ID,PARTITION_ID", unique = true),
+ @Index(name = "IDX_SP_URI_HASH_URI_V2", columnList = "HASH_URI,RES_ID,PARTITION_ID"),
// for sorting
- @Index(
- name = "IDX_SP_URI_HASH_IDENTITY_V2",
- columnList = "HASH_IDENTITY,SP_URI,RES_ID,PARTITION_ID",
- unique = true),
+ @Index(name = "IDX_SP_URI_HASH_IDENTITY_V2", columnList = "HASH_IDENTITY,SP_URI,RES_ID,PARTITION_ID"),
// for index create/delete
@Index(name = "IDX_SP_URI_COORDS", columnList = "RES_ID")
})
diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddIndexTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddIndexTask.java
index e7e3b079e2b..2251e19b2a8 100644
--- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddIndexTask.java
+++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddIndexTask.java
@@ -34,6 +34,7 @@ import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Set;
+import java.util.stream.Collectors;
public class AddIndexTask extends BaseTableTask {
@@ -41,6 +42,7 @@ public class AddIndexTask extends BaseTableTask {
private String myIndexName;
private List myColumns;
+ private List myNullableColumns;
private Boolean myUnique;
private List myIncludeColumns = Collections.emptyList();
/** Should the operation avoid taking a lock on the table */
@@ -64,6 +66,14 @@ public class AddIndexTask extends BaseTableTask {
myUnique = theUnique;
}
+ public List getNullableColumns() {
+ return myNullableColumns;
+ }
+
+ public void setNullableColumns(List theNullableColumns) {
+ this.myNullableColumns = theNullableColumns;
+ }
+
@Override
public void validate() {
super.validate();
@@ -171,14 +181,15 @@ public class AddIndexTask extends BaseTableTask {
@Nonnull
private String buildMSSqlNotNullWhereClause() {
- String mssqlWhereClause;
- mssqlWhereClause = " WHERE (";
- for (int i = 0; i < myColumns.size(); i++) {
- mssqlWhereClause += myColumns.get(i) + " IS NOT NULL ";
- if (i < myColumns.size() - 1) {
- mssqlWhereClause += "AND ";
- }
+ String mssqlWhereClause = "";
+ if (myNullableColumns == null || myNullableColumns.isEmpty()) {
+ return mssqlWhereClause;
}
+
+ mssqlWhereClause = " WHERE (";
+ mssqlWhereClause += myNullableColumns.stream()
+ .map(column -> column + " IS NOT NULL ")
+ .collect(Collectors.joining("AND"));
mssqlWhereClause += ")";
return mssqlWhereClause;
}
@@ -187,6 +198,10 @@ public class AddIndexTask extends BaseTableTask {
setColumns(Arrays.asList(theColumns));
}
+ public void setNullableColumns(String... theColumns) {
+ setNullableColumns(Arrays.asList(theColumns));
+ }
+
public void setIncludeColumns(String... theIncludeColumns) {
setIncludeColumns(Arrays.asList(theIncludeColumns));
}
diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java
index 7639e486a8d..3f5657fba18 100644
--- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java
+++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java
@@ -405,6 +405,31 @@ public class Builder {
return new BuilderCompleteTask(task);
}
+ /**
+ * This is strictly needed for SQL Server, as it will create filtered indexes on nullable columns, and we have to build a tail clause which matches what the SQL Server Hibernate dialect does.
+ */
+ public BuilderCompleteTask withPossibleNullableColumns(ColumnAndNullable... theColumns) {
+ String[] columnNames = Arrays.stream(theColumns)
+ .map(ColumnAndNullable::getColumnName)
+ .toArray(String[]::new);
+ String[] nullableColumnNames = Arrays.stream(theColumns)
+ .filter(ColumnAndNullable::isNullable)
+ .map(ColumnAndNullable::getColumnName)
+ .toArray(String[]::new);
+ AddIndexTask task = new AddIndexTask(myRelease, myVersion);
+ task.setTableName(myTableName);
+ task.setIndexName(myIndexName);
+ task.setUnique(myUnique);
+ task.setColumns(columnNames);
+ task.setNullableColumns(nullableColumnNames);
+ task.setOnline(myOnline);
+ if (myIncludeColumns != null) {
+ task.setIncludeColumns(myIncludeColumns);
+ }
+ addTask(task);
+ return new BuilderCompleteTask(task);
+ }
+
public BuilderAddIndexUnique includeColumns(String... theIncludeColumns) {
myIncludeColumns = theIncludeColumns;
return this;
diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/ColumnAndNullable.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/ColumnAndNullable.java
new file mode 100644
index 00000000000..ea89dccfd0e
--- /dev/null
+++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/ColumnAndNullable.java
@@ -0,0 +1,22 @@
+package ca.uhn.fhir.jpa.migrate.tasks.api;
+
+/**
+ * Simple data class for holding information about a column, and whether it was nullable at time of writing this migration.
+ */
+public class ColumnAndNullable {
+ private final String myColumnName;
+ private final boolean myNullable;
+
+ public ColumnAndNullable(String myColumnName, boolean myNullable) {
+ this.myColumnName = myColumnName;
+ this.myNullable = myNullable;
+ }
+
+ public String getColumnName() {
+ return myColumnName;
+ }
+
+ public boolean isNullable() {
+ return myNullable;
+ }
+}
From 11d61a5f72a1e4b0ae5e726eb35c2f945368edf4 Mon Sep 17 00:00:00 2001
From: jdar8 <69840459+jdar8@users.noreply.github.com>
Date: Mon, 24 Jun 2024 08:56:00 -0700
Subject: [PATCH 10/19] Jd 20240614 6010 bundle entry search score missing
(#6011)
* 6010 test
* 6010 add bundle entry search score
* 6010 support other fhir versions for search score
* 6010 changelog
* 6116 mvn spotless
* 6010 address code review comments
---------
Co-authored-by: jdar
---
.../model/api/ResourceMetadataKeyEnum.java | 10 ++++
.../java/ca/uhn/fhir/util/BundleUtil.java | 32 ++++++++----
.../util/bundle/SearchBundleEntryParts.java | 11 +++-
.../6010-add-support-bundle-search-score.yaml | 5 ++
.../rest/server/Dstu2_1BundleFactory.java | 6 +++
.../provider/dstu2/Dstu2BundleFactory.java | 6 +++
.../hapi/rest/server/Dstu3BundleFactory.java | 5 ++
.../dstu2hl7org/Dstu2Hl7OrgBundleFactory.java | 5 ++
.../r4/hapi/rest/server/R4BundleFactory.java | 5 ++
.../uhn/fhir/util/bundle/BundleUtilTest.java | 50 +++++++++++++++++++
.../hapi/rest/server/R4BBundleFactory.java | 5 ++
.../r5/hapi/rest/server/R5BundleFactory.java | 5 ++
12 files changed, 134 insertions(+), 11 deletions(-)
create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6010-add-support-bundle-search-score.yaml
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ResourceMetadataKeyEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ResourceMetadataKeyEnum.java
index 68aa3a4279c..977a0cdf258 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ResourceMetadataKeyEnum.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ResourceMetadataKeyEnum.java
@@ -31,6 +31,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import java.io.Serializable;
+import java.math.BigDecimal;
import java.util.Date;
import java.util.List;
@@ -80,6 +81,15 @@ public abstract class ResourceMetadataKeyEnum implements Serializable {
*/
public static final ResourceMetadataKeyEnum ENTRY_SEARCH_MODE =
new ResourceMetadataKeyEnum<>("ENTRY_SEARCH_MODE", BundleEntrySearchModeEnum.class) {};
+ /**
+ * If present and populated with a decimal value, contains the "bundle entry search score", which is the value of the score field in the Bundle entry containing this resource.
+ * The value for this key corresponds to field Bundle.entry.search.score. This value represents the search ranking score, where 1.0 is relevant and 0.0 is irrelevant.
+ *
+ * Note that the search score is only used in FHIR DSTU2 and later.
+ *
+ */
+ public static final ResourceMetadataKeyEnum ENTRY_SEARCH_SCORE =
+ new ResourceMetadataKeyEnum<>("ENTRY_SEARCH_SCORE", BigDecimal.class) {};
/**
* If present and populated with a {@link BundleEntryTransactionMethodEnum}, contains the "bundle entry transaction operation", which is the value of the status field in the Bundle entry
* containing this resource. The value for this key corresponds to field Bundle.entry.transaction.operation. This value can be set in resources being transmitted to a server to
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleUtil.java
index 2317feafc88..6801a25bfd6 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleUtil.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleUtil.java
@@ -50,6 +50,7 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashSet;
@@ -578,42 +579,53 @@ public class BundleUtil {
BaseRuntimeElementCompositeDefinition> searchChildContentsDef =
(BaseRuntimeElementCompositeDefinition>) searchChildDef.getChildByName("search");
BaseRuntimeChildDefinition searchModeChildDef = searchChildContentsDef.getChildByName("mode");
+ BaseRuntimeChildDefinition searchScoreChildDef = searchChildContentsDef.getChildByName("score");
List retVal = new ArrayList<>();
for (IBase nextEntry : entries) {
SearchBundleEntryParts parts = getSearchBundleEntryParts(
- fullUrlChildDef, resourceChildDef, searchChildDef, searchModeChildDef, nextEntry);
+ fullUrlChildDef,
+ resourceChildDef,
+ searchChildDef,
+ searchModeChildDef,
+ searchScoreChildDef,
+ nextEntry);
retVal.add(parts);
}
return retVal;
}
private static SearchBundleEntryParts getSearchBundleEntryParts(
- BaseRuntimeChildDefinition fullUrlChildDef,
- BaseRuntimeChildDefinition resourceChildDef,
- BaseRuntimeChildDefinition searchChildDef,
- BaseRuntimeChildDefinition searchModeChildDef,
+ BaseRuntimeChildDefinition theFullUrlChildDef,
+ BaseRuntimeChildDefinition theResourceChildDef,
+ BaseRuntimeChildDefinition theSearchChildDef,
+ BaseRuntimeChildDefinition theSearchModeChildDef,
+ BaseRuntimeChildDefinition theSearchScoreChildDef,
IBase entry) {
IBaseResource resource = null;
String matchMode = null;
+ BigDecimal searchScore = null;
- String fullUrl = fullUrlChildDef
+ String fullUrl = theFullUrlChildDef
.getAccessor()
.getFirstValueOrNull(entry)
.map(t -> ((IPrimitiveType>) t).getValueAsString())
.orElse(null);
- for (IBase nextResource : resourceChildDef.getAccessor().getValues(entry)) {
+ for (IBase nextResource : theResourceChildDef.getAccessor().getValues(entry)) {
resource = (IBaseResource) nextResource;
}
- for (IBase nextSearch : searchChildDef.getAccessor().getValues(entry)) {
- for (IBase nextUrl : searchModeChildDef.getAccessor().getValues(nextSearch)) {
+ for (IBase nextSearch : theSearchChildDef.getAccessor().getValues(entry)) {
+ for (IBase nextUrl : theSearchModeChildDef.getAccessor().getValues(nextSearch)) {
matchMode = ((IPrimitiveType>) nextUrl).getValueAsString();
}
+ for (IBase nextUrl : theSearchScoreChildDef.getAccessor().getValues(nextSearch)) {
+ searchScore = (BigDecimal) ((IPrimitiveType>) nextUrl).getValue();
+ }
}
- return new SearchBundleEntryParts(fullUrl, resource, matchMode);
+ return new SearchBundleEntryParts(fullUrl, resource, matchMode, searchScore);
}
/**
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/SearchBundleEntryParts.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/SearchBundleEntryParts.java
index 2f38aaa1597..7687958be81 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/SearchBundleEntryParts.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/SearchBundleEntryParts.java
@@ -22,15 +22,20 @@ package ca.uhn.fhir.util.bundle;
import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
import org.hl7.fhir.instance.model.api.IBaseResource;
+import java.math.BigDecimal;
+
public class SearchBundleEntryParts {
private final IBaseResource myResource;
private final BundleEntrySearchModeEnum mySearchMode;
+ private final BigDecimal mySearchScore;
private final String myFullUrl;
- public SearchBundleEntryParts(String theFullUrl, IBaseResource theResource, String theSearchMode) {
+ public SearchBundleEntryParts(
+ String theFullUrl, IBaseResource theResource, String theSearchMode, BigDecimal theSearchScore) {
myFullUrl = theFullUrl;
myResource = theResource;
mySearchMode = BundleEntrySearchModeEnum.forCode(theSearchMode);
+ mySearchScore = theSearchScore;
}
public String getFullUrl() {
@@ -44,4 +49,8 @@ public class SearchBundleEntryParts {
public BundleEntrySearchModeEnum getSearchMode() {
return mySearchMode;
}
+
+ public BigDecimal getSearchScore() {
+ return mySearchScore;
+ }
}
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6010-add-support-bundle-search-score.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6010-add-support-bundle-search-score.yaml
new file mode 100644
index 00000000000..212c77a9e48
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6010-add-support-bundle-search-score.yaml
@@ -0,0 +1,5 @@
+---
+type: add
+issue: 6010
+jira: SMILE-8214
+title: "When populated, the search score field will now be included in the entries of a response Bundle."
diff --git a/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/rest/server/Dstu2_1BundleFactory.java b/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/rest/server/Dstu2_1BundleFactory.java
index 8eaf848f8f0..1d37f9a53d3 100644
--- a/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/rest/server/Dstu2_1BundleFactory.java
+++ b/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/rest/server/Dstu2_1BundleFactory.java
@@ -43,6 +43,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
+import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
@@ -148,10 +149,15 @@ public class Dstu2_1BundleFactory implements IVersionSpecificBundleFactory {
}
}
+ // Populate Bundle.entry.search
BundleEntrySearchModeEnum searchMode = ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.get(nextAsResource);
if (searchMode != null) {
entry.getSearch().getModeElement().setValueAsString(searchMode.getCode());
}
+ BigDecimal searchScore = ResourceMetadataKeyEnum.ENTRY_SEARCH_SCORE.get(nextAsResource);
+ if (searchScore != null) {
+ entry.getSearch().getScoreElement().setValue(searchScore);
+ }
}
/*
diff --git a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2/Dstu2BundleFactory.java b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2/Dstu2BundleFactory.java
index b78b8d7f3c5..30bfb849b87 100644
--- a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2/Dstu2BundleFactory.java
+++ b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2/Dstu2BundleFactory.java
@@ -41,6 +41,7 @@ import jakarta.annotation.Nonnull;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
+import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
@@ -138,10 +139,15 @@ public class Dstu2BundleFactory implements IVersionSpecificBundleFactory {
}
populateBundleEntryFullUrl(next, entry);
+ // Populate Bundle.entry.search
BundleEntrySearchModeEnum searchMode = ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.get(next);
if (searchMode != null) {
entry.getSearch().getModeElement().setValue(searchMode.getCode());
}
+ BigDecimal searchScore = ResourceMetadataKeyEnum.ENTRY_SEARCH_SCORE.get(next);
+ if (searchScore != null) {
+ entry.getSearch().getScoreElement().setValue(searchScore);
+ }
}
/*
diff --git a/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/rest/server/Dstu3BundleFactory.java b/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/rest/server/Dstu3BundleFactory.java
index af435eefe5f..49578c7c5db 100644
--- a/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/rest/server/Dstu3BundleFactory.java
+++ b/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/rest/server/Dstu3BundleFactory.java
@@ -44,6 +44,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
+import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
@@ -173,6 +174,10 @@ public class Dstu3BundleFactory implements IVersionSpecificBundleFactory {
if (searchMode != null) {
entry.getSearch().getModeElement().setValueAsString(searchMode.getCode());
}
+ BigDecimal searchScore = ResourceMetadataKeyEnum.ENTRY_SEARCH_SCORE.get(nextAsResource);
+ if (searchScore != null) {
+ entry.getSearch().getScoreElement().setValue(searchScore);
+ }
}
/*
diff --git a/hapi-fhir-structures-hl7org-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2hl7org/Dstu2Hl7OrgBundleFactory.java b/hapi-fhir-structures-hl7org-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2hl7org/Dstu2Hl7OrgBundleFactory.java
index 90b50bea114..0deb4a5c6ce 100644
--- a/hapi-fhir-structures-hl7org-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2hl7org/Dstu2Hl7OrgBundleFactory.java
+++ b/hapi-fhir-structures-hl7org-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2hl7org/Dstu2Hl7OrgBundleFactory.java
@@ -45,6 +45,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
+import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
@@ -178,6 +179,10 @@ public class Dstu2Hl7OrgBundleFactory implements IVersionSpecificBundleFactory {
if (searchMode != null) {
entry.getSearch().getModeElement().setValueAsString(searchMode.getCode());
}
+ BigDecimal searchScore = ResourceMetadataKeyEnum.ENTRY_SEARCH_SCORE.get(nextAsResource);
+ if (searchScore != null) {
+ entry.getSearch().getScoreElement().setValue(searchScore);
+ }
}
/*
diff --git a/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/rest/server/R4BundleFactory.java b/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/rest/server/R4BundleFactory.java
index 73cd64314b1..d279f87cd44 100644
--- a/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/rest/server/R4BundleFactory.java
+++ b/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/rest/server/R4BundleFactory.java
@@ -45,6 +45,7 @@ import org.hl7.fhir.r4.model.DomainResource;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Resource;
+import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
@@ -181,6 +182,10 @@ public class R4BundleFactory implements IVersionSpecificBundleFactory {
if (searchMode != null) {
entry.getSearch().getModeElement().setValueAsString(searchMode.getCode());
}
+ BigDecimal searchScore = ResourceMetadataKeyEnum.ENTRY_SEARCH_SCORE.get(nextAsResource);
+ if (searchScore != null) {
+ entry.getSearch().getScoreElement().setValue(searchScore);
+ }
}
/*
diff --git a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/bundle/BundleUtilTest.java b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/bundle/BundleUtilTest.java
index 2fb32b59b3f..f349ffa457d 100644
--- a/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/bundle/BundleUtilTest.java
+++ b/hapi-fhir-structures-r4/src/test/java/ca/uhn/fhir/util/bundle/BundleUtilTest.java
@@ -30,6 +30,7 @@ import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
+import java.math.BigDecimal;
import java.util.Collections;
import java.util.List;
import java.util.function.Consumer;
@@ -550,6 +551,55 @@ public class BundleUtilTest {
assertNotNull(searchBundleEntryParts.get(0).getResource());
}
+ @Test
+ public void testConvertingToSearchBundleEntryPartsReturnsScore() {
+
+ //Given
+ String bundleString = """
+ {
+ "resourceType": "Bundle",
+ "id": "bd194b7f-ac1e-429a-a206-ee2c470f23b5",
+ "type": "searchset",
+ "total": 1,
+ "link": [
+ {
+ "relation": "self",
+ "url": "http://localhost:8000/Condition?_count=1"
+ }
+ ],
+ "entry": [
+ {
+ "fullUrl": "http://localhost:8000/Condition/1626",
+ "resource": {
+ "resourceType": "Condition",
+ "id": "1626",
+ "identifier": [
+ {
+ "system": "urn:hssc:musc:conditionid",
+ "value": "1064115000.1.5"
+ }
+ ]
+ },
+ "search": {
+ "mode": "match",
+ "score": 1
+ }
+ }
+ ]
+ }""";
+ Bundle bundle = ourCtx.newJsonParser().parseResource(Bundle.class, bundleString);
+
+ //When
+ List searchBundleEntryParts = BundleUtil.getSearchBundleEntryParts(ourCtx, bundle);
+
+ //Then
+ assertThat(searchBundleEntryParts).hasSize(1);
+ assertEquals(BundleEntrySearchModeEnum.MATCH, searchBundleEntryParts.get(0).getSearchMode());
+ assertEquals(new BigDecimal(1), searchBundleEntryParts.get(0).getSearchScore());
+ assertThat(searchBundleEntryParts.get(0).getFullUrl()).contains("Condition/1626");
+ assertNotNull(searchBundleEntryParts.get(0).getResource());
+ }
+
@Test
public void testTransactionSorterReturnsDeletesInCorrectProcessingOrder() {
Bundle b = new Bundle();
diff --git a/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/rest/server/R4BBundleFactory.java b/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/rest/server/R4BBundleFactory.java
index 01933a7e756..b9f912450d1 100644
--- a/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/rest/server/R4BBundleFactory.java
+++ b/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/rest/server/R4BBundleFactory.java
@@ -44,6 +44,7 @@ import org.hl7.fhir.r4b.model.DomainResource;
import org.hl7.fhir.r4b.model.IdType;
import org.hl7.fhir.r4b.model.Resource;
+import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
@@ -177,6 +178,10 @@ public class R4BBundleFactory implements IVersionSpecificBundleFactory {
if (searchMode != null) {
entry.getSearch().getModeElement().setValueAsString(searchMode.getCode());
}
+ BigDecimal searchScore = ResourceMetadataKeyEnum.ENTRY_SEARCH_SCORE.get(nextAsResource);
+ if (searchScore != null) {
+ entry.getSearch().getScoreElement().setValue(searchScore);
+ }
}
/*
diff --git a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/rest/server/R5BundleFactory.java b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/rest/server/R5BundleFactory.java
index 8b974912923..aae865bd163 100644
--- a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/rest/server/R5BundleFactory.java
+++ b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/rest/server/R5BundleFactory.java
@@ -44,6 +44,7 @@ import org.hl7.fhir.r5.model.DomainResource;
import org.hl7.fhir.r5.model.IdType;
import org.hl7.fhir.r5.model.Resource;
+import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
@@ -177,6 +178,10 @@ public class R5BundleFactory implements IVersionSpecificBundleFactory {
if (searchMode != null) {
entry.getSearch().getModeElement().setValueAsString(searchMode.getCode());
}
+ BigDecimal searchScore = ResourceMetadataKeyEnum.ENTRY_SEARCH_SCORE.get(nextAsResource);
+ if (searchScore != null) {
+ entry.getSearch().getScoreElement().setValue(searchScore);
+ }
}
/*
From d4e3698f37b4e3095cb9722c8cedff2cf8f48ae8 Mon Sep 17 00:00:00 2001
From: Luke deGruchy
Date: Tue, 25 Jun 2024 12:11:47 -0400
Subject: [PATCH 11/19] Handle conditional creates across partitions with
duplicate search URLs by updating ResourceSearchUrlEntity (#5983)
* First commit.
* SQLs in test class work for Postgres.
* Refine DropPrimaryKeyTask. Add migration code to do all operations except drop and add primary key. Comment out stub test code to do all non drop and add primary key operations. Add verification code to test to verify schema and record counts.
* Oracle drop primary key working.
* Oracle drop primary key working in test code. Proper drop primary and add primary key tasks working in H2.
* Migration tests work on all 4 databases.
* Spotless.
* Fix compile error.
* Add hapi error codes for new Exceptions.
* Add another hapi error code.
* Cleanup TODOs. Fix error codes. Spotless.
* Add new partition conditional create test. Fix bug from merging tasks class. Add new unit test. Add changelog without hapi-fhir issue.
* Add test for conditional create with partitions.
* Fix TODOs. Reverse unneeded changes. Rename changelog. Refinements. Javadoc.
* Tweak changelog.
* Rename config key internally. Spotless.
* Fix a newly introduced bug. Cleanup. Tweak changelog.
* Small fix. Spotless.
* Fix compile error.
* Spotless.
* Address code review comments.
* Address more code review comments.
* Fix test.
* Fix test.
---
...33-search-url-with-partition-id-in-pk.yaml | 7 +
.../ca/uhn/fhir/jpa/config/JpaConfig.java | 7 +-
.../tasks/HapiFhirJpaMigrationTasks.java | 26 ++-
.../fhir/jpa/search/ResourceSearchUrlSvc.java | 12 +-
.../jpa/model/config/PartitionSettings.java | 10 ++
.../model/entity/ResourceSearchUrlEntity.java | 46 +++++-
.../entity/ResourceSearchUrlEntityPK.java | 118 ++++++++++++++
...FhirResourceDaoR4ConcurrentCreateTest.java | 15 +-
.../dao/r4/FhirResourceDaoR4CreateTest.java | 68 ++++++++
.../jpa/embedded/JpaEmbeddedDatabase.java | 3 +-
.../jpa/embedded/HapiSchemaMigrationTest.java | 154 +++++++++++++++++-
.../jpa/migrate/taskdef/AddColumnTask.java | 15 +-
.../migrate/taskdef/AddPrimaryKeyTask.java | 78 +++++++++
.../taskdef/BaseTableColumnTypeTask.java | 21 +++
.../fhir/jpa/migrate/taskdef/BaseTask.java | 43 ++++-
.../migrate/taskdef/DropPrimaryKeyTask.java | 151 +++++++++++++++++
.../fhir/jpa/migrate/tasks/api/Builder.java | 29 +++-
17 files changed, 770 insertions(+), 33 deletions(-)
create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6033-search-url-with-partition-id-in-pk.yaml
create mode 100644 hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceSearchUrlEntityPK.java
create mode 100644 hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddPrimaryKeyTask.java
create mode 100644 hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropPrimaryKeyTask.java
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6033-search-url-with-partition-id-in-pk.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6033-search-url-with-partition-id-in-pk.yaml
new file mode 100644
index 00000000000..df97d120be6
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_0/6033-search-url-with-partition-id-in-pk.yaml
@@ -0,0 +1,7 @@
+---
+type: fix
+issue: 6033
+jira: SMILE-8429
+title: "Previously, attempting to store resources with common identifiers but different partitions would fail.
+ This has been fixed by adding a new configuration key defaulting to false to allow storing resources with duplicate identifiers across partitions.
+ This new feature can be activated by calling PartitionSettings.setConditionalCreateDuplicateIdentifiersEnabled()"
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java
index 67a7f55d28e..fb78410c64a 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java
@@ -80,6 +80,7 @@ import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices;
import ca.uhn.fhir.jpa.interceptor.OverridePathBasedReferentialIntegrityForDeletesInterceptor;
import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingRuleBuilder;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.packages.IHapiPackageCacheManager;
@@ -857,12 +858,14 @@ public class JpaConfig {
PersistenceContextProvider thePersistenceContextProvider,
IResourceSearchUrlDao theResourceSearchUrlDao,
MatchUrlService theMatchUrlService,
- FhirContext theFhirContext) {
+ FhirContext theFhirContext,
+ PartitionSettings thePartitionSettings) {
return new ResourceSearchUrlSvc(
thePersistenceContextProvider.getEntityManager(),
theResourceSearchUrlDao,
theMatchUrlService,
- theFhirContext);
+ theFhirContext,
+ thePartitionSettings);
}
@Bean
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
index 10b58c19b22..1c358a07123 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
@@ -128,7 +128,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
protected void init740() {
// Start of migrations from 7.2 to 7.4
- Builder version = forVersion(VersionEnum.V7_4_0);
+ final Builder version = forVersion(VersionEnum.V7_4_0);
{
version.onTable("HFJ_RES_SEARCH_URL")
@@ -348,6 +348,30 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
.nullable()
.withType(ColumnTypeEnum.STRING, 100)
.failureAllowed();
+
+ {
+ // Please see https://github.com/hapifhir/hapi-fhir/issues/6033 for why we're doing this
+ version.onTable("HFJ_RES_SEARCH_URL")
+ .addColumn("20240618.2", "PARTITION_ID", -1)
+ .nullable()
+ .type(ColumnTypeEnum.INT);
+
+ version.onTable("HFJ_RES_SEARCH_URL")
+ .addColumn("20240618.3", "PARTITION_DATE")
+ .nullable()
+ .type(ColumnTypeEnum.DATE_ONLY);
+
+ version.executeRawSql("20240618.4", "UPDATE HFJ_RES_SEARCH_URL SET PARTITION_ID = -1");
+
+ version.onTable("HFJ_RES_SEARCH_URL")
+ .modifyColumn("20240618.5", "PARTITION_ID")
+ .nonNullable()
+ .withType(ColumnTypeEnum.INT);
+
+ version.onTable("HFJ_RES_SEARCH_URL").dropPrimaryKey("20240618.6");
+
+ version.onTable("HFJ_RES_SEARCH_URL").addPrimaryKey("20240618.7", "RES_SEARCH_URL", "PARTITION_ID");
+ }
}
{
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java
index 7eaf6e6ecfc..d4e336ab870 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java
@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.search;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
@@ -51,16 +52,19 @@ public class ResourceSearchUrlSvc {
private final MatchUrlService myMatchUrlService;
private final FhirContext myFhirContext;
+ private final PartitionSettings myPartitionSettings;
public ResourceSearchUrlSvc(
EntityManager theEntityManager,
IResourceSearchUrlDao theResourceSearchUrlDao,
MatchUrlService theMatchUrlService,
- FhirContext theFhirContext) {
+ FhirContext theFhirContext,
+ PartitionSettings thePartitionSettings) {
myEntityManager = theEntityManager;
myResourceSearchUrlDao = theResourceSearchUrlDao;
myMatchUrlService = theMatchUrlService;
myFhirContext = theFhirContext;
+ myPartitionSettings = thePartitionSettings;
}
/**
@@ -87,8 +91,10 @@ public class ResourceSearchUrlSvc {
String theResourceName, String theMatchUrl, ResourceTable theResourceTable) {
String canonicalizedUrlForStorage = createCanonicalizedUrlForStorage(theResourceName, theMatchUrl);
- ResourceSearchUrlEntity searchUrlEntity =
- ResourceSearchUrlEntity.from(canonicalizedUrlForStorage, theResourceTable);
+ ResourceSearchUrlEntity searchUrlEntity = ResourceSearchUrlEntity.from(
+ canonicalizedUrlForStorage,
+ theResourceTable,
+ myPartitionSettings.isConditionalCreateDuplicateIdentifiersEnabled());
// calling dao.save performs a merge operation which implies a trip to
// the database to see if the resource exists. Since we don't need the check, we avoid the trip by calling
// em.persist.
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/PartitionSettings.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/PartitionSettings.java
index e12eb5cfd8c..f077d3baf0e 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/PartitionSettings.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/PartitionSettings.java
@@ -32,6 +32,7 @@ public class PartitionSettings {
private boolean myUnnamedPartitionMode;
private Integer myDefaultPartitionId;
private boolean myAlwaysOpenNewTransactionForDifferentPartition;
+ private boolean myConditionalCreateDuplicateIdentifiersEnabled = false;
/**
* Should we always open a new database transaction if the partition context changes
@@ -171,6 +172,15 @@ public class PartitionSettings {
PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED);
}
+ public boolean isConditionalCreateDuplicateIdentifiersEnabled() {
+ return myConditionalCreateDuplicateIdentifiersEnabled;
+ }
+
+ public void setConditionalCreateDuplicateIdentifiersEnabled(
+ boolean theConditionalCreateDuplicateIdentifiersEnabled) {
+ myConditionalCreateDuplicateIdentifiersEnabled = theConditionalCreateDuplicateIdentifiersEnabled;
+ }
+
public enum CrossPartitionReferenceMode {
/**
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceSearchUrlEntity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceSearchUrlEntity.java
index 0adb60dd3e2..e307606efde 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceSearchUrlEntity.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceSearchUrlEntity.java
@@ -20,10 +20,10 @@
package ca.uhn.fhir.jpa.model.entity;
import jakarta.persistence.Column;
+import jakarta.persistence.EmbeddedId;
import jakarta.persistence.Entity;
import jakarta.persistence.FetchType;
import jakarta.persistence.ForeignKey;
-import jakarta.persistence.Id;
import jakarta.persistence.Index;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
@@ -31,7 +31,9 @@ import jakarta.persistence.Table;
import jakarta.persistence.Temporal;
import jakarta.persistence.TemporalType;
+import java.time.LocalDate;
import java.util.Date;
+import java.util.Optional;
/**
* This entity is used to enforce uniqueness on a given search URL being
@@ -52,12 +54,12 @@ import java.util.Date;
public class ResourceSearchUrlEntity {
public static final String RES_SEARCH_URL_COLUMN_NAME = "RES_SEARCH_URL";
+ public static final String PARTITION_ID = "PARTITION_ID";
public static final int RES_SEARCH_URL_LENGTH = 768;
- @Id
- @Column(name = RES_SEARCH_URL_COLUMN_NAME, length = RES_SEARCH_URL_LENGTH, nullable = false)
- private String mySearchUrl;
+ @EmbeddedId
+ private ResourceSearchUrlEntityPK myPk;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(
@@ -70,17 +72,35 @@ public class ResourceSearchUrlEntity {
@Column(name = "RES_ID", updatable = false, nullable = false, insertable = false)
private Long myResourcePid;
+ @Column(name = "PARTITION_DATE", nullable = true, insertable = true, updatable = false)
+ private LocalDate myPartitionDate;
+
@Column(name = "CREATED_TIME", nullable = false)
@Temporal(TemporalType.TIMESTAMP)
private Date myCreatedTime;
- public static ResourceSearchUrlEntity from(String theUrl, ResourceTable theResourceTable) {
+ public static ResourceSearchUrlEntity from(
+ String theUrl, ResourceTable theResourceTable, boolean theSearchUrlDuplicateAcrossPartitionsEnabled) {
+
return new ResourceSearchUrlEntity()
+ .setPk(ResourceSearchUrlEntityPK.from(
+ theUrl, theResourceTable, theSearchUrlDuplicateAcrossPartitionsEnabled))
+ .setPartitionDate(Optional.ofNullable(theResourceTable.getPartitionId())
+ .map(PartitionablePartitionId::getPartitionDate)
+ .orElse(null))
.setResourceTable(theResourceTable)
- .setSearchUrl(theUrl)
.setCreatedTime(new Date());
}
+ public ResourceSearchUrlEntityPK getPk() {
+ return myPk;
+ }
+
+ public ResourceSearchUrlEntity setPk(ResourceSearchUrlEntityPK thePk) {
+ myPk = thePk;
+ return this;
+ }
+
public Long getResourcePid() {
if (myResourcePid != null) {
return myResourcePid;
@@ -112,11 +132,19 @@ public class ResourceSearchUrlEntity {
}
public String getSearchUrl() {
- return mySearchUrl;
+ return myPk.getSearchUrl();
}
- public ResourceSearchUrlEntity setSearchUrl(String theSearchUrl) {
- mySearchUrl = theSearchUrl;
+ public Integer getPartitionId() {
+ return myPk.getPartitionId();
+ }
+
+ public LocalDate getPartitionDate() {
+ return myPartitionDate;
+ }
+
+ public ResourceSearchUrlEntity setPartitionDate(LocalDate thePartitionDate) {
+ myPartitionDate = thePartitionDate;
return this;
}
}
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceSearchUrlEntityPK.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceSearchUrlEntityPK.java
new file mode 100644
index 00000000000..0ba6850d66d
--- /dev/null
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceSearchUrlEntityPK.java
@@ -0,0 +1,118 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Model
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.model.entity;
+
+import jakarta.persistence.Column;
+import jakarta.persistence.Embeddable;
+
+import java.io.Serializable;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.StringJoiner;
+
+/**
+ * Multi-column primary Key for {@link ResourceSearchUrlEntity}
+ */
+@Embeddable
+public class ResourceSearchUrlEntityPK implements Serializable {
+ public static final String RES_SEARCH_URL_COLUMN_NAME = "RES_SEARCH_URL";
+ public static final String PARTITION_ID_COLUMN_NAME = "PARTITION_ID";
+
+ public static final int RES_SEARCH_URL_LENGTH = 768;
+
+ private static final long serialVersionUID = 1L;
+
+ private static final int PARTITION_ID_NULL_EQUIVALENT = -1;
+
+ @Column(name = RES_SEARCH_URL_COLUMN_NAME, length = RES_SEARCH_URL_LENGTH, nullable = false)
+	// Weird field name is to ensure that this is the first key in the index
+ private String my_A_SearchUrl;
+
+ @Column(name = PARTITION_ID_COLUMN_NAME, nullable = false, insertable = true, updatable = false)
+	// Weird field name is to ensure that this is the second key in the index
+ private Integer my_B_PartitionId;
+
+ public ResourceSearchUrlEntityPK() {}
+
+ public static ResourceSearchUrlEntityPK from(
+ String theSearchUrl, ResourceTable theResourceTable, boolean theSearchUrlDuplicateAcrossPartitionsEnabled) {
+ return new ResourceSearchUrlEntityPK(
+ theSearchUrl,
+ computePartitionIdOrNullEquivalent(theResourceTable, theSearchUrlDuplicateAcrossPartitionsEnabled));
+ }
+
+ public ResourceSearchUrlEntityPK(String theSearchUrl, int thePartitionId) {
+ my_A_SearchUrl = theSearchUrl;
+ my_B_PartitionId = thePartitionId;
+ }
+
+ public String getSearchUrl() {
+ return my_A_SearchUrl;
+ }
+
+ public void setSearchUrl(String theMy_A_SearchUrl) {
+ my_A_SearchUrl = theMy_A_SearchUrl;
+ }
+
+ public Integer getPartitionId() {
+ return my_B_PartitionId;
+ }
+
+ public void setPartitionId(Integer theMy_B_PartitionId) {
+ my_B_PartitionId = theMy_B_PartitionId;
+ }
+
+ @Override
+ public boolean equals(Object theO) {
+ if (this == theO) {
+ return true;
+ }
+ if (theO == null || getClass() != theO.getClass()) {
+ return false;
+ }
+ ResourceSearchUrlEntityPK that = (ResourceSearchUrlEntityPK) theO;
+ return Objects.equals(my_A_SearchUrl, that.my_A_SearchUrl)
+ && Objects.equals(my_B_PartitionId, that.my_B_PartitionId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(my_A_SearchUrl, my_B_PartitionId);
+ }
+
+ @Override
+ public String toString() {
+ return new StringJoiner(", ", ResourceSearchUrlEntityPK.class.getSimpleName() + "[", "]")
+ .add("my_A_SearchUrl='" + my_A_SearchUrl + "'")
+ .add("my_B_PartitionId=" + my_B_PartitionId)
+ .toString();
+ }
+
+	// Resolves the partition ID to store in the PK; falls back to the -1 sentinel when
+	// cross-partition duplicates are disabled or the resource has no partition ID.
+	private static int computePartitionIdOrNullEquivalent(
+			ResourceTable theResourceTable, boolean theSearchUrlDuplicateAcrossPartitionsEnabled) {
+		if (!theSearchUrlDuplicateAcrossPartitionsEnabled) {
+			return PARTITION_ID_NULL_EQUIVALENT;
+		}
+		return Optional.ofNullable(theResourceTable.getPartitionId())
+				.map(PartitionablePartitionId::getPartitionId)
+				.orElse(PARTITION_ID_NULL_EQUIVALENT);
+	}
+}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ConcurrentCreateTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ConcurrentCreateTest.java
index fa7376a5483..04c3797f2d4 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ConcurrentCreateTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ConcurrentCreateTest.java
@@ -41,13 +41,14 @@ import java.util.stream.Collectors;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.fail;
-import static org.junit.jupiter.api.Assertions.fail;
public class FhirResourceDaoR4ConcurrentCreateTest extends BaseJpaR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoR4ConcurrentCreateTest.class);
+ private static final boolean IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE = false;
+
ThreadGaterPointcutLatch myThreadGaterPointcutLatchInterceptor;
UserRequestRetryVersionConflictsInterceptor myUserRequestRetryVersionConflictsInterceptor;
ResourceConcurrentSubmitterSvc myResourceConcurrentSubmitterSvc;
@@ -132,12 +133,12 @@ public class FhirResourceDaoR4ConcurrentCreateTest extends BaseJpaR4Test {
final ResourceTable resTable4 = myResourceTableDao.save(createResTable());
Date tooOldBy10Minutes = cutOffTimeMinus(tenMinutes);
- ResourceSearchUrlEntity tooOld1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.444", resTable1).setCreatedTime(tooOldBy10Minutes);
- ResourceSearchUrlEntity tooOld2 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.445", resTable2).setCreatedTime(tooOldBy10Minutes);
+ ResourceSearchUrlEntity tooOld1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.444", resTable1, IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE).setCreatedTime(tooOldBy10Minutes);
+ ResourceSearchUrlEntity tooOld2 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.445", resTable2, IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE).setCreatedTime(tooOldBy10Minutes);
Date tooNewBy10Minutes = cutOffTimePlus(tenMinutes);
- ResourceSearchUrlEntity tooNew1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.446", resTable3).setCreatedTime(tooNewBy10Minutes);
- ResourceSearchUrlEntity tooNew2 =ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.447", resTable4).setCreatedTime(tooNewBy10Minutes);
+ ResourceSearchUrlEntity tooNew1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.446", resTable3, IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE).setCreatedTime(tooNewBy10Minutes);
+ ResourceSearchUrlEntity tooNew2 =ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.447", resTable4, IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE).setCreatedTime(tooNewBy10Minutes);
myResourceSearchUrlDao.saveAll(asList(tooOld1, tooOld2, tooNew1, tooNew2));
@@ -165,8 +166,8 @@ public class FhirResourceDaoR4ConcurrentCreateTest extends BaseJpaR4Test {
final ResourceTable resTable1 = myResourceTableDao.save(createResTable());
final ResourceTable resTable2 = myResourceTableDao.save(createResTable());
- ResourceSearchUrlEntity entry1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.444", resTable1);
- ResourceSearchUrlEntity entry2 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.445", resTable2);
+ ResourceSearchUrlEntity entry1 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.444", resTable1, IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE);
+ ResourceSearchUrlEntity entry2 = ResourceSearchUrlEntity.from("Observation?identifier=20210427133226.445", resTable2, IS_SEARCH_URL_DUPLICATE_ACROSS_PARTITIONS_ENABLED_FALSE);
myResourceSearchUrlDao.saveAll(asList(entry1, entry2));
// when
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CreateTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CreateTest.java
index ea14d5a6cb2..c09eed9cb62 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CreateTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CreateTest.java
@@ -1,9 +1,11 @@
package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
+import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
@@ -65,6 +67,7 @@ import org.springframework.transaction.support.TransactionTemplate;
import java.math.BigDecimal;
import java.time.Instant;
+import java.time.LocalDate;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Arrays;
@@ -79,6 +82,7 @@ import java.util.concurrent.Future;
import java.util.stream.Collectors;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertInstanceOf;
@@ -1335,6 +1339,62 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
assertRemainingTasks();
}
+	@ParameterizedTest
+	@ValueSource(booleans = {true, false})
+	void conditionalCreateSameIdentifierCrossPartition(boolean theIsSearchUrlDuplicateAcrossPartitionsEnabled) {
+		myPartitionSettings.setPartitioningEnabled(true);
+		myPartitionSettings.setConditionalCreateDuplicateIdentifiersEnabled(theIsSearchUrlDuplicateAcrossPartitionsEnabled);
+
+		final PartitionEntity partitionEntity1 = new PartitionEntity();
+		partitionEntity1.setId(1);
+		partitionEntity1.setName("Partition-A");
+		myPartitionDao.save(partitionEntity1);
+
+		final PartitionEntity partitionEntity2 = new PartitionEntity();
+		partitionEntity2.setId(2);
+		partitionEntity2.setName("Partition-B");
+		myPartitionDao.save(partitionEntity2);
+
+		final BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext);
+		final String matchUrl = "identifier=http://tempuri.org|1";
+		bundleBuilder.addTransactionCreateEntry(myTask1, "urn:uuid:59cda086-4763-4ef0-8e36-8c90058686ea")
+			.conditional(matchUrl);
+
+		final RequestPartitionId requestPartitionId1 = RequestPartitionId.fromPartitionId(1, LocalDate.now());
+		final RequestPartitionId requestPartitionId2 = RequestPartitionId.fromPartitionId(2, LocalDate.now());
+
+		final List responseEntries1 = sendBundleAndGetResponse(bundleBuilder.getBundle(), requestPartitionId1);
+		assertEquals(1, responseEntries1.size());
+		final Bundle.BundleEntryComponent bundleEntry1 = responseEntries1.get(0);
+		assertEquals("201 Created", bundleEntry1.getResponse().getStatus());
+
+		if (!theIsSearchUrlDuplicateAcrossPartitionsEnabled) {
+			final IBaseBundle bundle = bundleBuilder.getBundle();
+			assertThatThrownBy(() -> sendBundleAndGetResponse(bundle, requestPartitionId2)).isInstanceOf(ResourceVersionConflictException.class);
+			return;
+		}
+
+		final List responseEntries2 = sendBundleAndGetResponse(bundleBuilder.getBundle(), requestPartitionId2);
+		assertEquals(1, responseEntries2.size());
+		// Verify the SECOND partition's response (was a copy-paste bug reading responseEntries1)
+		final Bundle.BundleEntryComponent bundleEntry2 = responseEntries2.get(0);
+		assertEquals("201 Created", bundleEntry2.getResponse().getStatus());
+
+		final List allSearchUrls = myResourceSearchUrlDao.findAll();
+
+		assertThat(allSearchUrls).hasSize(2);
+
+		final String resolvedSearchUrl = "Task?identifier=http%3A%2F%2Ftempuri.org%7C1";
+
+		final ResourceSearchUrlEntity resourceSearchUrlEntity1 = allSearchUrls.get(0);
+		final ResourceSearchUrlEntity resourceSearchUrlEntity2 = allSearchUrls.get(1);
+
+		assertThat(resourceSearchUrlEntity1.getSearchUrl()).isEqualTo(resolvedSearchUrl);
+		assertThat(resourceSearchUrlEntity1.getPartitionId()).isEqualTo(partitionEntity1.getId());
+
+		assertThat(resourceSearchUrlEntity2.getSearchUrl()).isEqualTo(resolvedSearchUrl);
+		assertThat(resourceSearchUrlEntity2.getPartitionId()).isEqualTo(partitionEntity2.getId());
+	}
+
private void assertRemainingTasks(Task... theExpectedTasks) {
final List searchUrlsPreDelete = myResourceSearchUrlDao.findAll();
@@ -1352,6 +1412,14 @@ public class FhirResourceDaoR4CreateTest extends BaseJpaR4Test {
}
}
+ private List sendBundleAndGetResponse(IBaseBundle theRequestBundle, RequestPartitionId thePartitionId) {
+ assertThat(theRequestBundle).isInstanceOf(Bundle.class);
+
+ final SystemRequestDetails requestDetails = new SystemRequestDetails();
+ requestDetails.setRequestPartitionId(thePartitionId);
+ return mySystemDao.transaction(requestDetails, (Bundle)theRequestBundle).getEntry();
+ }
+
private List sendBundleAndGetResponse(IBaseBundle theRequestBundle) {
assertTrue(theRequestBundle instanceof Bundle);
diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/JpaEmbeddedDatabase.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/JpaEmbeddedDatabase.java
index 8fc3c268b81..6efb321aa1a 100644
--- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/JpaEmbeddedDatabase.java
+++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/JpaEmbeddedDatabase.java
@@ -113,8 +113,7 @@ public abstract class JpaEmbeddedDatabase {
}
public void executeSqlAsBatch(List theStatements) {
- try {
- Statement statement = myConnection.createStatement();
+ try (final Statement statement = myConnection.createStatement()) {
for (String sql : theStatements) {
if (!StringUtils.isBlank(sql)) {
statement.addBatch(sql);
diff --git a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/embedded/HapiSchemaMigrationTest.java b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/embedded/HapiSchemaMigrationTest.java
index 49c64fdc8ac..2bd04e8eb4c 100644
--- a/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/embedded/HapiSchemaMigrationTest.java
+++ b/hapi-fhir-jpaserver-test-utilities/src/test/java/ca/uhn/fhir/jpa/embedded/HapiSchemaMigrationTest.java
@@ -10,6 +10,8 @@ import ca.uhn.fhir.jpa.migrate.tasks.HapiFhirJpaMigrationTasks;
import ca.uhn.fhir.system.HapiSystemProperties;
import ca.uhn.fhir.test.utilities.docker.RequiresDocker;
import ca.uhn.fhir.util.VersionEnum;
+import jakarta.annotation.Nonnull;
+import jakarta.annotation.Nullable;
import org.apache.commons.dbcp2.BasicDataSource;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
@@ -21,8 +23,19 @@ import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
import java.sql.SQLException;
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.Collection;
import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
import java.util.Properties;
import static ca.uhn.fhir.jpa.embedded.HapiEmbeddedDatabasesExtension.FIRST_TESTED_VERSION;
@@ -38,6 +51,20 @@ public class HapiSchemaMigrationTest {
private static final Logger ourLog = LoggerFactory.getLogger(HapiSchemaMigrationTest.class);
public static final String TEST_SCHEMA_NAME = "test";
+ private static final String METADATA_COLUMN_NAME = "COLUMN_NAME";
+ private static final String METADATA_DATA_TYPE = "DATA_TYPE";
+ private static final String METADATA_IS_NULLABLE = "IS_NULLABLE";
+ private static final String METADATA_DEFAULT_VALUE = "COLUMN_DEF";
+ private static final String METADATA_IS_NULLABLE_NO = "NO";
+ private static final String METADATA_IS_NULLABLE_YES = "YES";
+
+ private static final String TABLE_HFJ_RES_SEARCH_URL = "HFJ_RES_SEARCH_URL";
+ private static final String COLUMN_RES_SEARCH_URL = "RES_SEARCH_URL";
+ private static final String COLUMN_PARTITION_ID = "PARTITION_ID";
+ private static final String COLUMN_PARTITION_DATE = "PARTITION_DATE";
+
+ private static final String NULL_PLACEHOLDER = "[NULL]";
+
static {
HapiSystemProperties.enableUnitTestMode();
}
@@ -92,10 +119,131 @@ public class HapiSchemaMigrationTest {
}
verifyForcedIdMigration(dataSource);
+
+ verifyHfjResSearchUrlMigration(database, theDriverType);
}
- private static void migrate(DriverTypeEnum theDriverType, DataSource dataSource, HapiMigrationStorageSvc hapiMigrationStorageSvc, VersionEnum to) throws SQLException {
- MigrationTaskList migrationTasks = new HapiFhirJpaMigrationTasks(Collections.emptySet()).getAllTasks(new VersionEnum[]{to});
+ /**
+ * We start with a single record in HFJ_RES_SEARCH_URL:
+ *
+ *
+ *
Primary key: ONLY RES_SEARCH_URL
+ *
PK: RES_SEARCH_URL: https://example.com
+ *
CREATED_TIME: 2023-06-29 10:14:39.69
+ *
RES_ID: 1678
+ *
+ *
+ * Once the migration is complete, we should have:
+ *
+ *
Primary key: RES_SEARCH_URL, PARTITION_ID
+ *
PK: RES_SEARCH_URL: https://example.com
+ *
PK: PARTITION_ID: -1
+ *
CREATED_TIME: 2023-06-29 10:14:39.69
+ *
RES_ID: 1678
+ *
PARTITION_DATE: null
+ *
+ */
+ private void verifyHfjResSearchUrlMigration(JpaEmbeddedDatabase theDatabase, DriverTypeEnum theDriverType) throws SQLException {
+ final List